(improvement)(build) Add Spotless to the build process. (#1639)

Author: lexluo09
Date: 2024-09-07 00:36:17 +08:00 (committed by GitHub)
Commit: 5f59e89eea (parent: ee15a88b06)
986 changed files with 15609 additions and 12706 deletions
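
For context: Spotless is a code formatter that can be hooked into the build so formatting is applied and enforced automatically. Below is a minimal sketch of what a spotless-maven-plugin setup typically looks like, assuming this project builds with Maven; the plugin version, the google-java-format AOSP style (which would account for the 4-space indentation and ~100-column wrapping visible in the reformatted code), and the enabled steps are illustrative assumptions, not details read from this diff.

<!-- Hypothetical sketch; version, formatter style, and steps are assumptions. -->
<plugin>
    <groupId>com.diffplug.spotless</groupId>
    <artifactId>spotless-maven-plugin</artifactId>
    <version>2.43.0</version> <!-- assumed version -->
    <configuration>
        <java>
            <googleJavaFormat>
                <!-- AOSP style keeps 4-space indentation, matching this diff -->
                <style>AOSP</style>
            </googleJavaFormat>
            <!-- steps like these would explain the import reordering seen below -->
            <removeUnusedImports/>
            <importOrder/>
        </java>
    </configuration>
    <executions>
        <execution>
            <goals>
                <!-- fail the build when files are not formatted -->
                <goal>check</goal>
            </goals>
        </execution>
    </executions>
</plugin>

With such a setup, mvn spotless:apply rewrites the sources in place (producing large mechanical diffs like the ones below), while mvn spotless:check fails the build whenever a file drifts from the configured style.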


@@ -40,12 +40,13 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
return dbs;
}
public List<String> getTables(ConnectInfo connectionInfo, String schemaName) throws SQLException {
public List<String> getTables(ConnectInfo connectionInfo, String schemaName)
throws SQLException {
List<String> tablesAndViews = new ArrayList<>();
DatabaseMetaData metaData = getDatabaseMetaData(connectionInfo);
try (ResultSet resultSet = metaData.getTables(schemaName, schemaName, null,
new String[]{"TABLE", "VIEW"})) {
try (ResultSet resultSet =
metaData.getTables(schemaName, schemaName, null, new String[] {"TABLE", "VIEW"})) {
while (resultSet.next()) {
String name = resultSet.getString("TABLE_NAME");
tablesAndViews.add(name);
@@ -56,7 +57,8 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
return tablesAndViews;
}
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName) throws SQLException {
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException {
List<DBColumn> dbColumns = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectInfo);
ResultSet columns = metaData.getColumns(schemaName, schemaName, tableName, null);
@@ -70,9 +72,11 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
}
protected DatabaseMetaData getDatabaseMetaData(ConnectInfo connectionInfo) throws SQLException {
Connection connection = DriverManager.getConnection(connectionInfo.getUrl(),
connectionInfo.getUserName(), connectionInfo.getPassword());
Connection connection =
DriverManager.getConnection(
connectionInfo.getUrl(),
connectionInfo.getUserName(),
connectionInfo.getPassword());
return connection.getMetaData();
}
}


@@ -13,9 +13,11 @@ public class ClickHouseAdaptor extends BaseDbAdaptor {
public String getDateFormat(String dateType, String dateFormat, String column) {
if (dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT_INT)) {
if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
return "toYYYYMM(toDate(parseDateTimeBestEffort(toString(%s))))".replace("%s", column);
return "toYYYYMM(toDate(parseDateTimeBestEffort(toString(%s))))"
.replace("%s", column);
} else if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
return "toMonday(toDate(parseDateTimeBestEffort(toString(%s))))".replace("%s", column);
return "toMonday(toDate(parseDateTimeBestEffort(toString(%s))))"
.replace("%s", column);
} else {
return "toDate(parseDateTimeBestEffort(toString(%s)))".replace("%s", column);
}
@@ -39,5 +41,4 @@ public class ClickHouseAdaptor extends BaseDbAdaptor {
functionMap.put("YEAR".toLowerCase(), "toYear");
return SqlReplaceHelper.replaceFunction(sql, functionMap);
}
}


@@ -2,12 +2,11 @@ package com.tencent.supersonic.headless.core.adaptor.db;
import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
import java.sql.SQLException;
import java.util.List;
/**
* Adapters for different query engines to obtain table, field, and time formatting methods
*/
/** Adapters for different query engines to obtain table, field, and time formatting methods */
public interface DbAdaptor {
String getDateFormat(String dateType, String dateFormat, String column);
@@ -18,6 +17,6 @@ public interface DbAdaptor {
List<String> getTables(ConnectInfo connectInfo, String schemaName) throws SQLException;
List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName) throws SQLException;
List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException;
}


@@ -5,7 +5,6 @@ import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import java.util.HashMap;
import java.util.Map;
public class DbAdaptorFactory {
private static Map<String, DbAdaptor> dbAdaptorMap;
@@ -22,5 +21,4 @@ public class DbAdaptorFactory {
public static DbAdaptor getEngineAdaptor(String engineType) {
return dbAdaptorMap.get(engineType);
}
}


@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.adaptor.db;
public class DefaultDbAdaptor extends BaseDbAdaptor {
@Override
@@ -12,5 +11,4 @@ public class DefaultDbAdaptor extends BaseDbAdaptor {
public String functionNameCorrector(String sql) {
return sql;
}
}


@@ -9,15 +9,18 @@ public class H2Adaptor extends BaseDbAdaptor {
public String getDateFormat(String dateType, String dateFormat, String column) {
if (dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT_INT)) {
if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
return "FORMATDATETIME(PARSEDATETIME(%s, 'yyyyMMdd'),'yyyy-MM')".replace("%s", column);
return "FORMATDATETIME(PARSEDATETIME(%s, 'yyyyMMdd'),'yyyy-MM')"
.replace("%s", column);
} else if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
return "DATE_TRUNC('week',%s)".replace("%s", column);
} else {
return "FORMATDATETIME(PARSEDATETIME(%s, 'yyyyMMdd'),'yyyy-MM-dd')".replace("%s", column);
return "FORMATDATETIME(PARSEDATETIME(%s, 'yyyyMMdd'),'yyyy-MM-dd')"
.replace("%s", column);
}
} else if (dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT)) {
if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
return "FORMATDATETIME(PARSEDATETIME(%s, 'yyyy-MM-dd'),'yyyy-MM') ".replace("%s", column);
return "FORMATDATETIME(PARSEDATETIME(%s, 'yyyy-MM-dd'),'yyyy-MM') "
.replace("%s", column);
} else if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
return "DATE_TRUNC('week',%s)".replace("%s", column);
} else {
@@ -31,5 +34,4 @@ public class H2Adaptor extends BaseDbAdaptor {
public String functionNameCorrector(String sql) {
return sql;
}
}


@@ -1,21 +1,19 @@
package com.tencent.supersonic.headless.core.adaptor.db;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
public class MysqlAdaptor extends BaseDbAdaptor {
/**
* transform YYYYMMDD to YYYY-MM-DD YYYY-MM YYYY-MM-DD(MONDAY)
*/
/** transform YYYYMMDD to YYYY-MM-DD YYYY-MM YYYY-MM-DD(MONDAY) */
@Override
public String getDateFormat(String dateType, String dateFormat, String column) {
if (dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT_INT)) {
if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
return "DATE_FORMAT(%s, '%Y-%m')".replace("%s", column);
} else if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
return "DATE_FORMAT(DATE_SUB(%s, INTERVAL (DAYOFWEEK(%s) - 2) DAY), '%Y-%m-%d')".replace("%s", column);
return "DATE_FORMAT(DATE_SUB(%s, INTERVAL (DAYOFWEEK(%s) - 2) DAY), '%Y-%m-%d')"
.replace("%s", column);
} else {
return "date_format(str_to_date(%s, '%Y%m%d'),'%Y-%m-%d')".replace("%s", column);
}
@@ -23,7 +21,8 @@ public class MysqlAdaptor extends BaseDbAdaptor {
if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
return "DATE_FORMAT(%s, '%Y-%m') ".replace("%s", column);
} else if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
return "DATE_FORMAT(DATE_SUB(%s, INTERVAL (DAYOFWEEK(%s) - 2) DAY), '%Y-%m-%d')".replace("%s", column);
return "DATE_FORMAT(DATE_SUB(%s, INTERVAL (DAYOFWEEK(%s) - 2) DAY), '%Y-%m-%d')"
.replace("%s", column);
} else {
return column;
}
@@ -35,5 +34,4 @@ public class MysqlAdaptor extends BaseDbAdaptor {
public String functionNameCorrector(String sql) {
return sql;
}
}


@@ -26,9 +26,11 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
public String getDateFormat(String dateType, String dateFormat, String column) {
if (dateFormat.equalsIgnoreCase(Constants.DAY_FORMAT_INT)) {
if (TimeDimensionEnum.MONTH.name().equalsIgnoreCase(dateType)) {
return "formatDateTime(toDate(parseDateTimeBestEffort(toString(%s))),'%Y-%m')".replace("%s", column);
return "formatDateTime(toDate(parseDateTimeBestEffort(toString(%s))),'%Y-%m')"
.replace("%s", column);
} else if (TimeDimensionEnum.WEEK.name().equalsIgnoreCase(dateType)) {
return "toMonday(toDate(parseDateTimeBestEffort(toString(%s))))".replace("%s", column);
return "toMonday(toDate(parseDateTimeBestEffort(toString(%s))))"
.replace("%s", column);
} else {
return "toDate(parseDateTimeBestEffort(toString(%s)))".replace("%s", column);
}
@@ -51,38 +53,45 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
functionMap.put("DAY".toLowerCase(), "TO_CHAR");
functionMap.put("YEAR".toLowerCase(), "TO_CHAR");
Map<String, UnaryOperator> functionCall = new HashMap<>();
functionCall.put("MONTH".toLowerCase(), o -> {
if (Objects.nonNull(o) && o instanceof ExpressionList) {
ExpressionList expressionList = (ExpressionList) o;
expressionList.add(new StringValue("MM"));
return expressionList;
}
return o;
});
functionCall.put("DAY".toLowerCase(), o -> {
if (Objects.nonNull(o) && o instanceof ExpressionList) {
ExpressionList expressionList = (ExpressionList) o;
expressionList.add(new StringValue("dd"));
return expressionList;
}
return o;
});
functionCall.put("YEAR".toLowerCase(), o -> {
if (Objects.nonNull(o) && o instanceof ExpressionList) {
ExpressionList expressionList = (ExpressionList) o;
expressionList.add(new StringValue("YYYY"));
return expressionList;
}
return o;
});
functionCall.put(
"MONTH".toLowerCase(),
o -> {
if (Objects.nonNull(o) && o instanceof ExpressionList) {
ExpressionList expressionList = (ExpressionList) o;
expressionList.add(new StringValue("MM"));
return expressionList;
}
return o;
});
functionCall.put(
"DAY".toLowerCase(),
o -> {
if (Objects.nonNull(o) && o instanceof ExpressionList) {
ExpressionList expressionList = (ExpressionList) o;
expressionList.add(new StringValue("dd"));
return expressionList;
}
return o;
});
functionCall.put(
"YEAR".toLowerCase(),
o -> {
if (Objects.nonNull(o) && o instanceof ExpressionList) {
ExpressionList expressionList = (ExpressionList) o;
expressionList.add(new StringValue("YYYY"));
return expressionList;
}
return o;
});
return SqlReplaceHelper.replaceFunction(sql, functionMap, functionCall);
}
public List<String> getTables(ConnectInfo connectionInfo, String schemaName) throws SQLException {
public List<String> getTables(ConnectInfo connectionInfo, String schemaName)
throws SQLException {
List<String> tablesAndViews = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectionInfo);
try (ResultSet resultSet = metaData.getTables(null, null, null,
new String[]{"TABLE", "VIEW"})) {
try (ResultSet resultSet =
metaData.getTables(null, null, null, new String[] {"TABLE", "VIEW"})) {
while (resultSet.next()) {
String name = resultSet.getString("TABLE_NAME");
tablesAndViews.add(name);
@@ -93,7 +102,8 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
return tablesAndViews;
}
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName) throws SQLException {
public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException {
List<DBColumn> dbColumns = Lists.newArrayList();
DatabaseMetaData metaData = getDatabaseMetaData(connectInfo);
ResultSet columns = metaData.getColumns(null, null, tableName, null);
@@ -105,5 +115,4 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
}
return dbColumns;
}
}


@@ -22,6 +22,4 @@ public class CacheCommonConfig {
@Value("${s2.query.cache.enable:true}")
private Boolean cacheEnable;
}
}


@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.cache;
public interface CacheManager {
Boolean put(String key, Object value);
@@ -10,5 +9,4 @@ public interface CacheManager {
String generateCacheKey(String prefix, String body);
Boolean removeCache(String key);
}


@@ -12,8 +12,7 @@ import java.util.concurrent.TimeUnit;
@Configuration
public class CaffeineCacheConfig {
@Autowired
private CacheCommonConfig cacheCommonConfig;
@Autowired private CacheCommonConfig cacheCommonConfig;
@Value("${s2.caffeine.initial.capacity:500}")
private Integer caffeineInitialCapacity;
@@ -24,7 +23,8 @@ public class CaffeineCacheConfig {
@Bean(name = "caffeineCache")
public Cache<String, Object> caffeineCache() {
return Caffeine.newBuilder()
.expireAfterWrite(cacheCommonConfig.getCacheCommonExpireAfterWrite(), TimeUnit.MINUTES)
.expireAfterWrite(
cacheCommonConfig.getCacheCommonExpireAfterWrite(), TimeUnit.MINUTES)
.initialCapacity(caffeineInitialCapacity)
.maximumSize(caffeineMaximumSize)
.build();


@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.cache;
import com.github.benmanes.caffeine.cache.Cache;
import com.google.common.base.Joiner;
import lombok.extern.slf4j.Slf4j;
@@ -13,8 +12,7 @@ import org.springframework.stereotype.Component;
@Slf4j
public class CaffeineCacheManager implements CacheManager {
@Autowired
private CacheCommonConfig cacheCommonConfig;
@Autowired private CacheCommonConfig cacheCommonConfig;
@Autowired
@Qualifier("caffeineCache")
@@ -39,8 +37,13 @@ public class CaffeineCacheManager implements CacheManager {
if (StringUtils.isEmpty(prefix)) {
prefix = "-1";
}
return Joiner.on(":").join(cacheCommonConfig.getCacheCommonApp(), cacheCommonConfig.getCacheCommonEnv(),
cacheCommonConfig.getCacheCommonVersion(), prefix, body);
return Joiner.on(":")
.join(
cacheCommonConfig.getCacheCommonApp(),
cacheCommonConfig.getCacheCommonEnv(),
cacheCommonConfig.getCacheCommonVersion(),
prefix,
body);
}
@Override


@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.cache;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
import lombok.extern.slf4j.Slf4j;
@@ -30,10 +29,11 @@ public class DefaultQueryCache implements QueryCache {
CacheCommonConfig cacheCommonConfig = ContextUtils.getBean(CacheCommonConfig.class);
if (cacheCommonConfig.getCacheEnable() && Objects.nonNull(value)) {
CompletableFuture.supplyAsync(() -> cacheManager.put(cacheKey, value))
.exceptionally(exception -> {
log.warn("exception:", exception);
return null;
});
.exceptionally(
exception -> {
log.warn("exception:", exception);
return null;
});
log.debug("put to cache, key: {}", cacheKey);
return true;
}
@@ -48,7 +48,8 @@ public class DefaultQueryCache implements QueryCache {
}
private String getKeyByModelIds(List<Long> modelIds) {
return String.join(",", modelIds.stream().map(Object::toString).collect(Collectors.toList()));
return String.join(
",", modelIds.stream().map(Object::toString).collect(Collectors.toList()));
}
private boolean isCache(SemanticQueryReq semanticQueryReq) {
@@ -61,5 +62,4 @@ public class DefaultQueryCache implements QueryCache {
}
return false;
}
}


@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.cache;
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
public interface QueryCache {
@@ -10,5 +9,4 @@ public interface QueryCache {
Boolean put(String cacheKey, Object value);
String getCacheKey(SemanticQueryReq semanticQueryReq);
}


@@ -3,11 +3,10 @@ package com.tencent.supersonic.headless.core.config;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
@Data
@Configuration
public class AggregatorConfig {
@Value("${s2.metric.aggregator.ratio.enable:true}")
private Boolean enableRatio;
}


@@ -9,20 +9,13 @@ import lombok.NoArgsConstructor;
@NoArgsConstructor
public class DefaultMetric {
/**
* default metrics
*/
/** default metrics */
private Long metricId;
/**
* default time span unit
*/
/** default time span unit */
private Integer unit;
/**
* default time type: DAY
* DAY, WEEK, MONTH, YEAR
*/
/** default time type: DAY DAY, WEEK, MONTH, YEAR */
private String period;
private String bizName;
@@ -39,4 +32,4 @@ public class DefaultMetric {
this.unit = unit;
this.period = period;
}
}
}


@@ -4,29 +4,17 @@ import com.tencent.supersonic.common.pojo.Constants;
import lombok.Data;
import lombok.ToString;
/**
* default metrics about the model
*/
/** default metrics about the model */
@ToString
@Data
public class DefaultMetricInfo {
/**
* default metrics
*/
/** default metrics */
private Long metricId;
/**
* default time span unit
*/
/** default time span unit */
private Integer unit = 1;
/**
* default time type: day
* DAY, WEEK, MONTH, YEAR
*/
/** default time type: day DAY, WEEK, MONTH, YEAR */
private String period = Constants.DAY;
}
}


@@ -40,5 +40,4 @@ public class DefaultSemanticConfig {
@Value("${s2.explain.path:/api/semantic/query/explain}")
private String explainPath;
}


@@ -18,5 +18,4 @@ public class Dim4Dict {
private List<String> blackList;
private List<String> whiteList;
private List<String> ruleList;
}
}


@@ -4,13 +4,10 @@ import lombok.Data;
import java.util.List;
/**
* when query an entity, return related dimension/metric info
*/
/** when query an entity, return related dimension/metric info */
@Data
public class EntityDetailData {
private List<Long> dimensionIds;
private List<Long> metricIds;
}
}


@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.config;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import lombok.Data;
@@ -12,4 +11,4 @@ public class EntityInternalDetail {
List<DimSchemaResp> dimensionList;
List<MetricSchemaResp> metricList;
}
}


@@ -10,8 +10,10 @@ public class ExecutorConfig {
@Value("${s2.metricParser.agg.mysql.lowVersion:5.7}")
private String mysqlLowVersion;
@Value("${s2.metricParser.agg.ck.lowVersion:20.4}")
private String ckLowVersion;
@Value("${s2.internal.metric.cnt.suffix:internal_cnt}")
private String internalMetricNameSuffix;


@@ -1,20 +1,12 @@
package com.tencent.supersonic.headless.core.executor;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.core.pojo.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.TimeRange;
import com.tencent.supersonic.headless.core.translator.calcite.schema.DataSourceTable;
import com.tencent.supersonic.headless.core.translator.calcite.schema.DataSourceTable.Builder;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.pojo.Materialization;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.adapter.enumerable.EnumerableRules;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
@@ -45,9 +37,16 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.springframework.util.CollectionUtils;
/**
* abstract of accelerator , provide Basic methods
*/
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
/** abstract of accelerator , provide Basic methods */
@Slf4j
public abstract class AbstractAccelerator implements QueryAccelerator {
@@ -56,11 +55,13 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
public static final String MATERIALIZATION_SYS_VIEW = "sys_view";
public static final String MATERIALIZATION_SYS_PARTITION = "sys_partition";
/**
* check if a materialization match the fields and partitions
*/
protected boolean check(RelOptPlanner relOptPlanner, RelBuilder relBuilder,
CalciteCatalogReader calciteCatalogReader, Materialization materialization, List<String> fields,
/** check if a materialization match the fields and partitions */
protected boolean check(
RelOptPlanner relOptPlanner,
RelBuilder relBuilder,
CalciteCatalogReader calciteCatalogReader,
Materialization materialization,
List<String> fields,
List<ImmutablePair<String, String>> partitions) {
if (!materialization.isPartitioned()) {
return fields.stream().allMatch(f -> materialization.getColumns().contains(f));
@@ -75,7 +76,8 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
Set<String> materializationFields = new HashSet<>(viewFields);
materializationFields.addAll(queryFields);
List<String> materializationFieldList = materializationFields.stream().collect(Collectors.toList());
List<String> materializationFieldList =
materializationFields.stream().collect(Collectors.toList());
relBuilder.clear();
if (!CollectionUtils.isEmpty(relOptPlanner.getMaterializations())) {
@@ -83,32 +85,41 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
}
Materialization viewMaterialization = Materialization.builder().build();
viewMaterialization.setName(String.format("%s.%s", MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_VIEW));
viewMaterialization.setName(
String.format("%s.%s", MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_VIEW));
viewMaterialization.setColumns(viewFieldList);
addMaterialization(calciteCatalogReader.getRootSchema(), viewMaterialization);
Materialization queryMaterialization = Materialization.builder().build();
queryMaterialization.setName(String.format("%s.%s", MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_SOURCE));
queryMaterialization.setName(
String.format("%s.%s", MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_SOURCE));
queryMaterialization.setColumns(materializationFieldList);
addMaterialization(calciteCatalogReader.getRootSchema(), queryMaterialization);
RelNode replacement = relBuilder.scan(Arrays.asList(MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_VIEW)).build();
RelBuilder viewBuilder = relBuilder.scan(Arrays.asList(MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_SOURCE));
RelNode replacement =
relBuilder
.scan(Arrays.asList(MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_VIEW))
.build();
RelBuilder viewBuilder =
relBuilder.scan(Arrays.asList(MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_SOURCE));
if (materialization.isPartitioned()) {
RexNode viewFilter = getRexNode(relBuilder, materialization,
MATERIALIZATION_SYS_PARTITION);
RexNode viewFilter =
getRexNode(relBuilder, materialization, MATERIALIZATION_SYS_PARTITION);
viewBuilder = viewBuilder.filter(viewFilter);
}
RelNode viewRel = project(viewBuilder, viewFieldList).build();
List<String> view = Arrays.asList(MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_VIEW);
RelOptMaterialization relOptMaterialization = new RelOptMaterialization(replacement, viewRel, null,
view);
RelOptMaterialization relOptMaterialization =
new RelOptMaterialization(replacement, viewRel, null, view);
relOptPlanner.addMaterialization(relOptMaterialization);
RelBuilder checkBuilder = relBuilder.scan(Arrays.asList(MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_SOURCE));
RelBuilder checkBuilder =
relBuilder.scan(Arrays.asList(MATERIALIZATION_SYS_DB, MATERIALIZATION_SYS_SOURCE));
if (materialization.isPartitioned()) {
checkBuilder = checkBuilder.filter(getRexNode(checkBuilder, partitions, MATERIALIZATION_SYS_PARTITION));
checkBuilder =
checkBuilder.filter(
getRexNode(checkBuilder, partitions, MATERIALIZATION_SYS_PARTITION));
}
RelNode checkRel = project(checkBuilder, queryFieldList).build();
relOptPlanner.setRoot(checkRel);
@@ -124,11 +135,12 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
protected CalciteCatalogReader getCalciteCatalogReader() {
CalciteCatalogReader calciteCatalogReader;
CalciteSchema viewSchema = SchemaBuilder.getMaterializationSchema();
calciteCatalogReader = new CalciteCatalogReader(
CalciteSchema.from(viewSchema.plus()),
CalciteSchema.from(viewSchema.plus()).path(null),
Configuration.typeFactory,
new CalciteConnectionConfigImpl(new Properties()));
calciteCatalogReader =
new CalciteCatalogReader(
CalciteSchema.from(viewSchema.plus()),
CalciteSchema.from(viewSchema.plus()).path(null),
Configuration.typeFactory,
new CalciteConnectionConfigImpl(new Properties()));
return calciteCatalogReader;
}
@@ -139,8 +151,8 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
return relOptPlanner;
}
protected RelBuilder builderMaterializationPlan(CalciteCatalogReader calciteCatalogReader,
RelOptPlanner relOptPlanner) {
protected RelBuilder builderMaterializationPlan(
CalciteCatalogReader calciteCatalogReader, RelOptPlanner relOptPlanner) {
relOptPlanner.addRelTraitDef(ConventionTraitDef.INSTANCE);
relOptPlanner.addRelTraitDef(RelDistributionTraitDef.INSTANCE);
EnumerableRules.rules().forEach(relOptPlanner::addRule);
@@ -149,7 +161,8 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
return RelFactories.LOGICAL_BUILDER.create(relOptCluster, calciteCatalogReader);
}
protected void addMaterialization(CalciteSchema dataSetSchema, Materialization materialization) {
protected void addMaterialization(
CalciteSchema dataSetSchema, Materialization materialization) {
String[] dbTable = materialization.getName().split("\\.");
String tb = dbTable[1].toLowerCase();
String db = dbTable[0].toLowerCase();
@@ -171,31 +184,38 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
} else {
dataSetSchema.add(tb, srcTable);
}
}
protected Set<String> extractTableNames(RelNode relNode) {
Set<String> tableNames = new HashSet<>();
RelShuttle shuttle = new RelHomogeneousShuttle() {
public RelNode visit(TableScan scan) {
RelOptTable table = scan.getTable();
tableNames.addAll(table.getQualifiedName());
return scan;
}
};
RelShuttle shuttle =
new RelHomogeneousShuttle() {
public RelNode visit(TableScan scan) {
RelOptTable table = scan.getTable();
tableNames.addAll(table.getQualifiedName());
return scan;
}
};
relNode.accept(shuttle);
return tableNames;
}
protected RexNode getRexNodeByTimeRange(RelBuilder relBuilder, TimeRange timeRange, String field) {
return relBuilder.call(SqlStdOperatorTable.AND,
relBuilder.call(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, relBuilder.field(field),
protected RexNode getRexNodeByTimeRange(
RelBuilder relBuilder, TimeRange timeRange, String field) {
return relBuilder.call(
SqlStdOperatorTable.AND,
relBuilder.call(
SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
relBuilder.field(field),
relBuilder.literal(timeRange.getStart())),
relBuilder.call(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, relBuilder.field(field),
relBuilder.call(
SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
relBuilder.field(field),
relBuilder.literal(timeRange.getEnd())));
}
protected RexNode getRexNode(RelBuilder relBuilder, Materialization materialization, String viewField) {
protected RexNode getRexNode(
RelBuilder relBuilder, Materialization materialization, String viewField) {
RexNode rexNode = null;
for (String partition : materialization.getPartitions()) {
TimeRange timeRange = TimeRange.builder().start(partition).end(partition).build();
@@ -203,32 +223,50 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
rexNode = getRexNodeByTimeRange(relBuilder, timeRange, viewField);
continue;
}
rexNode = relBuilder.call(SqlStdOperatorTable.OR, rexNode,
getRexNodeByTimeRange(relBuilder, timeRange, viewField));
rexNode =
relBuilder.call(
SqlStdOperatorTable.OR,
rexNode,
getRexNodeByTimeRange(relBuilder, timeRange, viewField));
}
return rexNode;
}
protected RexNode getRexNode(RelBuilder relBuilder, List<ImmutablePair<String, String>> timeRanges,
protected RexNode getRexNode(
RelBuilder relBuilder,
List<ImmutablePair<String, String>> timeRanges,
String viewField) {
RexNode rexNode = null;
for (ImmutablePair<String, String> timeRange : timeRanges) {
if (rexNode == null) {
rexNode = getRexNodeByTimeRange(relBuilder,
TimeRange.builder().start(timeRange.left).end(timeRange.right).build(),
viewField);
rexNode =
getRexNodeByTimeRange(
relBuilder,
TimeRange.builder()
.start(timeRange.left)
.end(timeRange.right)
.build(),
viewField);
continue;
}
rexNode = relBuilder.call(SqlStdOperatorTable.OR, rexNode,
getRexNodeByTimeRange(relBuilder,
TimeRange.builder().start(timeRange.left).end(timeRange.right).build(),
viewField));
rexNode =
relBuilder.call(
SqlStdOperatorTable.OR,
rexNode,
getRexNodeByTimeRange(
relBuilder,
TimeRange.builder()
.start(timeRange.left)
.end(timeRange.right)
.build(),
viewField));
}
return rexNode;
}
private static RelBuilder project(RelBuilder relBuilder, List<String> fields) {
List<RexNode> rexNodes = fields.stream().map(f -> relBuilder.field(f)).collect(Collectors.toList());
List<RexNode> rexNodes =
fields.stream().map(f -> relBuilder.field(f)).collect(Collectors.toList());
return relBuilder.project(rexNodes);
}
}


@@ -26,8 +26,10 @@ public class JdbcExecutor implements QueryExecutor {
for (QueryAccelerator queryAccelerator : ComponentFactory.getQueryAccelerators()) {
if (queryAccelerator.check(queryStatement)) {
SemanticQueryResp semanticQueryResp = queryAccelerator.query(queryStatement);
if (Objects.nonNull(semanticQueryResp) && !semanticQueryResp.getResultList().isEmpty()) {
log.info("query by Accelerator {}", queryAccelerator.getClass().getSimpleName());
if (Objects.nonNull(semanticQueryResp)
&& !semanticQueryResp.getResultList().isEmpty()) {
log.info(
"query by Accelerator {}", queryAccelerator.getClass().getSimpleName());
return semanticQueryResp;
}
}
@@ -43,5 +45,4 @@ public class JdbcExecutor implements QueryExecutor {
queryResultWithColumns.setSql(sql);
return queryResultWithColumns;
}
}


@@ -4,8 +4,8 @@ import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* customize various query media ( like duckDb redis) to improved query performance
* check ok and query successful , return SemanticQueryResp to interface immediately
* customize various query media ( like duckDb redis) to improved query performance check ok and
* query successful , return SemanticQueryResp to interface immediately
*/
public interface QueryAccelerator {


@@ -3,9 +3,7 @@ package com.tencent.supersonic.headless.core.executor;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* QueryExecutor submits SQL to the database engine and performs acceleration if necessary.
*/
/** QueryExecutor submits SQL to the database engine and performs acceleration if necessary. */
public interface QueryExecutor {
boolean accept(QueryStatement queryStatement);


@@ -1,12 +1,10 @@
package com.tencent.supersonic.headless.core.pojo;
import lombok.Data;
@Data
public class ConnectInfo {
private String url;
private String userName;
@@ -14,5 +12,4 @@ public class ConnectInfo {
private String password;
private String database;
}


@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.RecordInfo;
import com.tencent.supersonic.common.util.AESEncryptionUtil;
@@ -36,9 +35,7 @@ public class Database extends RecordInfo {
private String database;
private String schema;
/**
* mysql,clickhouse
*/
/** mysql,clickhouse */
private String type;
private List<String> admins = Lists.newArrayList();
@@ -48,5 +45,4 @@ public class Database extends RecordInfo {
public String passwordDecrypt() {
return AESEncryptionUtil.aesDecryptECB(password);
}
}


@@ -1,25 +1,24 @@
package com.tencent.supersonic.headless.core.pojo;
import javax.sql.DataSource;
import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
/**
* duckDb connection session object
*/
/** duckDb connection session object */
@Component
@Slf4j
public class DuckDbSource {
@@ -62,8 +61,10 @@ public class DuckDbSource {
}
protected void init(JdbcTemplate jdbcTemplate) {
jdbcTemplate.execute(String.format("SET memory_limit = '%sGB';", executorConfig.getMemoryLimit()));
jdbcTemplate.execute(String.format("SET temp_directory='%s';", executorConfig.getDuckDbTemp()));
jdbcTemplate.execute(
String.format("SET memory_limit = '%sGB';", executorConfig.getMemoryLimit()));
jdbcTemplate.execute(
String.format("SET temp_directory='%s';", executorConfig.getDuckDbTemp()));
jdbcTemplate.execute(String.format("SET threads TO %s;", executorConfig.getThreads()));
jdbcTemplate.execute("SET enable_object_cache = true;");
}
@@ -81,21 +82,23 @@ public class DuckDbSource {
}
public void query(String sql, SemanticQueryResp queryResultWithColumns) {
duckDbJdbcTemplate.query(sql, rs -> {
if (null == rs) {
return queryResultWithColumns;
}
ResultSetMetaData metaData = rs.getMetaData();
List<QueryColumn> queryColumns = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
String key = metaData.getColumnLabel(i);
queryColumns.add(new QueryColumn(key, metaData.getColumnTypeName(i)));
}
queryResultWithColumns.setColumns(queryColumns);
List<Map<String, Object>> resultList = buildResult(rs);
queryResultWithColumns.setResultList(resultList);
return queryResultWithColumns;
});
duckDbJdbcTemplate.query(
sql,
rs -> {
if (null == rs) {
return queryResultWithColumns;
}
ResultSetMetaData metaData = rs.getMetaData();
List<QueryColumn> queryColumns = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
String key = metaData.getColumnLabel(i);
queryColumns.add(new QueryColumn(key, metaData.getColumnTypeName(i)));
}
queryResultWithColumns.setColumns(queryColumns);
List<Map<String, Object>> resultList = buildResult(rs);
queryResultWithColumns.setResultList(resultList);
return queryResultWithColumns;
});
}
public static List<Map<String, Object>> buildResult(ResultSet resultSet) {
@@ -148,7 +151,8 @@ public class DuckDbSource {
row.put(column, resultSet.getObject(i));
break;
default:
throw new Exception("get result row type not found :" + rsMeta.getColumnType(i));
throw new Exception(
"get result row type not found :" + rsMeta.getColumnType(i));
}
}
list.add(row);


@@ -203,8 +203,10 @@ public class JdbcDataSource {
// default validation query
String driverName = druidDataSource.getDriverClassName();
if (driverName.indexOf("sqlserver") != -1 || driverName.indexOf("mysql") != -1
|| driverName.indexOf("h2") != -1 || driverName.indexOf("moonbox") != -1) {
if (driverName.indexOf("sqlserver") != -1
|| driverName.indexOf("mysql") != -1
|| driverName.indexOf("h2") != -1
|| driverName.indexOf("moonbox") != -1) {
druidDataSource.setValidationQuery("select 1");
}
@@ -240,9 +242,12 @@ public class JdbcDataSource {
}
private String getDataSourceKey(Database database) {
return JdbcDataSourceUtils.getKey(database.getName(),
return JdbcDataSourceUtils.getKey(
database.getName(),
database.getUrl(),
database.getUsername(),
database.passwordDecrypt(), "", false);
database.passwordDecrypt(),
"",
false);
}
}


@@ -1,9 +1,10 @@
package com.tencent.supersonic.headless.core.pojo;
import java.util.List;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
@Builder
public class Materialization {


@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import lombok.Data;
import java.util.List;
@Data
@@ -13,5 +14,4 @@ public class MetricQueryParam {
private Long limit;
private List<ColumnOrder> order;
private boolean nativeQuery = false;
}


@@ -54,12 +54,17 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
headlessConverter.convert(queryStatement);
}
}
log.debug("SemanticConverter after {} {} {}", queryParam, queryStatement.getDataSetQueryParam(),
log.debug(
"SemanticConverter after {} {} {}",
queryParam,
queryStatement.getDataSetQueryParam(),
queryStatement.getMetricQueryParam());
if (!queryStatement.getDataSetQueryParam().getSql().isEmpty()) {
doParse(queryStatement.getDataSetQueryParam(), queryStatement);
} else {
queryStatement.getMetricQueryParam().setNativeQuery(queryParam.getQueryType().isNativeAggQuery());
queryStatement
.getMetricQueryParam()
.setNativeQuery(queryParam.getQueryType().isNativeAggQuery());
doParse(queryStatement);
}
if (StringUtils.isEmpty(queryStatement.getSql())) {
@@ -67,38 +72,56 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
if (StringUtils.isNotBlank(queryStatement.getSql())
&& !SqlSelectHelper.hasLimit(queryStatement.getSql())) {
String querySql = queryStatement.getSql() + " limit " + queryStatement.getLimit().toString();
String querySql =
queryStatement.getSql() + " limit " + queryStatement.getLimit().toString();
queryStatement.setSql(querySql);
}
}
public QueryStatement doParse(DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) {
public QueryStatement doParse(
DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) {
log.info("parse dataSetQuery [{}] ", dataSetQueryParam);
try {
if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) {
List<String[]> tables = new ArrayList<>();
boolean isSingleTable = dataSetQueryParam.getTables().size() == 1;
for (MetricTable metricTable : dataSetQueryParam.getTables()) {
QueryStatement tableSql = parserSql(metricTable, isSingleTable,
dataSetQueryParam, queryStatement);
if (isSingleTable && Objects.nonNull(tableSql.getDataSetQueryParam())
QueryStatement tableSql =
parserSql(
metricTable, isSingleTable, dataSetQueryParam, queryStatement);
if (isSingleTable
&& Objects.nonNull(tableSql.getDataSetQueryParam())
&& !tableSql.getDataSetSimplifySql().isEmpty()) {
queryStatement.setSql(tableSql.getDataSetSimplifySql());
queryStatement.setDataSetQueryParam(dataSetQueryParam);
return queryStatement;
}
tables.add(new String[]{metricTable.getAlias(), tableSql.getSql()});
tables.add(new String[] {metricTable.getAlias(), tableSql.getSql()});
}
if (!tables.isEmpty()) {
String sql;
if (dataSetQueryParam.isSupportWith()) {
sql = "with " + tables.stream().map(t -> String.format("%s as (%s)", t[0], t[1])).collect(
Collectors.joining(",")) + "\n" + dataSetQueryParam.getSql();
sql =
"with "
+ tables.stream()
.map(t -> String.format("%s as (%s)", t[0], t[1]))
.collect(Collectors.joining(","))
+ "\n"
+ dataSetQueryParam.getSql();
} else {
sql = dataSetQueryParam.getSql();
for (String[] tb : tables) {
sql = StringUtils.replace(sql, tb[0],
"(" + tb[1] + ") " + (dataSetQueryParam.isWithAlias() ? "" : tb[0]), -1);
sql =
StringUtils.replace(
sql,
tb[0],
"("
+ tb[1]
+ ") "
+ (dataSetQueryParam.isWithAlias()
? ""
: tb[0]),
-1);
}
}
queryStatement.setSql(sql);
@@ -114,8 +137,9 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
public QueryStatement doParse(QueryStatement queryStatement) {
return doParse(queryStatement, AggOption.getAggregation(
queryStatement.getMetricQueryParam().isNativeQuery()));
return doParse(
queryStatement,
AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery()));
}
public QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) {
@@ -130,9 +154,12 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
return queryStatement;
}
private QueryStatement parserSql(MetricTable metricTable, Boolean isSingleMetricTable,
DataSetQueryParam dataSetQueryParam,
QueryStatement queryStatement) throws Exception {
private QueryStatement parserSql(
MetricTable metricTable,
Boolean isSingleMetricTable,
DataSetQueryParam dataSetQueryParam,
QueryStatement queryStatement)
throws Exception {
MetricQueryParam metricReq = new MetricQueryParam();
metricReq.setMetrics(metricTable.getMetrics());
metricReq.setDimensions(metricTable.getDimensions());
@@ -151,10 +178,11 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
tableSql = doParse(tableSql, metricTable.getAggOption());
if (!tableSql.isOk()) {
throw new Exception(String.format("parser table [%s] error [%s]", metricTable.getAlias(),
tableSql.getErrMsg()));
throw new Exception(
String.format(
"parser table [%s] error [%s]",
metricTable.getAlias(), tableSql.getErrMsg()));
}
return tableSql;
}
}


@@ -10,9 +10,7 @@ import org.springframework.util.CollectionUtils;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* Remove the default metric added by the system when the query only has dimensions
*/
/** Remove the default metric added by the system when the query only has dimensions */
@Slf4j
@Component("DetailQueryOptimizer")
public class DetailQueryOptimizer implements QueryOptimizer {
@@ -26,10 +24,14 @@ public class DetailQueryOptimizer implements QueryOptimizer {
}
log.debug("before handleNoMetric, sql:{}", sqlRaw);
if (isDetailQuery(queryParam)) {
if (queryParam.getMetrics().size() == 0 && !CollectionUtils.isEmpty(queryParam.getGroups())) {
if (queryParam.getMetrics().size() == 0
&& !CollectionUtils.isEmpty(queryParam.getGroups())) {
String sqlForm = "select %s from ( %s ) src_no_metric";
String sql = String.format(sqlForm, queryParam.getGroups().stream().collect(
Collectors.joining(",")), sqlRaw);
String sql =
String.format(
sqlForm,
queryParam.getGroups().stream().collect(Collectors.joining(",")),
sqlRaw);
queryStatement.setSql(sql);
}
}
@@ -37,7 +39,8 @@ public class DetailQueryOptimizer implements QueryOptimizer {
}
public boolean isDetailQuery(QueryParam queryParam) {
return Objects.nonNull(queryParam) && queryParam.getQueryType().isNativeAggQuery()
return Objects.nonNull(queryParam)
&& queryParam.getQueryType().isNativeAggQuery()
&& CollectionUtils.isEmpty(queryParam.getMetrics());
}
}


@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* A query optimizer rewrites physical SQL by following a set of
* optimization rules, trying to derive the most efficient query.
* A query optimizer rewrites physical SQL by following a set of optimization rules, trying to
* derive the most efficient query.
*/
public interface QueryOptimizer {
void rewrite(QueryStatement queryStatement);


@@ -3,9 +3,7 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* A query parser generates physical SQL for the QueryStatement.
*/
/** A query parser generates physical SQL for the QueryStatement. */
public interface QueryParser {
void parse(QueryStatement queryStatement, AggOption aggOption) throws Exception;
}


@@ -3,11 +3,10 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* SemanticTranslator converts semantic query statement into SQL statement that
* can be executed against physical data models.
* SemanticTranslator converts semantic query statement into SQL statement that can be executed
* against physical data models.
*/
public interface SemanticTranslator {
void translate(QueryStatement queryStatement);
}


@@ -3,20 +3,18 @@ package com.tencent.supersonic.headless.core.translator.calcite;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.QueryParser;
import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.schema.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
/**
* the calcite parse implements
*/
/** the calcite parse implements */
@Component("CalciteQueryParser")
@Slf4j
public class CalciteQueryParser implements QueryParser {
@@ -33,14 +31,21 @@ public class CalciteQueryParser implements QueryParser {
SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement);
AggPlanner aggBuilder = new AggPlanner(semanticSchema);
aggBuilder.explain(queryStatement, isAgg);
EngineType engineType = EngineType.fromString(semanticSchema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(semanticSchema.getSemanticModel().getDatabase().getType());
queryStatement.setSql(aggBuilder.getSql(engineType));
if (Objects.nonNull(queryStatement.getEnableOptimize()) && queryStatement.getEnableOptimize()
&& Objects.nonNull(queryStatement.getDataSetAlias()) && !queryStatement.getDataSetAlias().isEmpty()) {
if (Objects.nonNull(queryStatement.getEnableOptimize())
&& queryStatement.getEnableOptimize()
&& Objects.nonNull(queryStatement.getDataSetAlias())
&& !queryStatement.getDataSetAlias().isEmpty()) {
// simplify model sql with query sql
String simplifySql = aggBuilder.simplify(
getSqlByDataSet(aggBuilder.getSql(engineType), queryStatement.getDataSetSql(),
queryStatement.getDataSetAlias()), engineType);
String simplifySql =
aggBuilder.simplify(
getSqlByDataSet(
aggBuilder.getSql(engineType),
queryStatement.getDataSetSql(),
queryStatement.getDataSetAlias()),
engineType);
if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) {
log.debug("simplifySql [{}]", simplifySql);
queryStatement.setDataSetSimplifySql(simplifySql);
@@ -48,15 +53,20 @@ public class CalciteQueryParser implements QueryParser {
}
}
private SemanticSchema getSemanticSchema(SemanticModel semanticModel, QueryStatement queryStatement) {
SemanticSchema semanticSchema = SemanticSchema.newBuilder(semanticModel.getSchemaKey()).build();
private SemanticSchema getSemanticSchema(
SemanticModel semanticModel, QueryStatement queryStatement) {
SemanticSchema semanticSchema =
SemanticSchema.newBuilder(semanticModel.getSchemaKey()).build();
semanticSchema.setSemanticModel(semanticModel);
semanticSchema.setDatasource(semanticModel.getDatasourceMap());
semanticSchema.setDimension(semanticModel.getDimensionMap());
semanticSchema.setMetric(semanticModel.getMetrics());
semanticSchema.setJoinRelations(semanticModel.getJoinRelations());
semanticSchema.setRuntimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())
.enableOptimize(queryStatement.getEnableOptimize()).build());
semanticSchema.setRuntimeOptions(
RuntimeOptions.builder()
.minMaxTime(queryStatement.getMinMaxTime())
.enableOptimize(queryStatement.getEnableOptimize())
.build());
return semanticSchema;
}


@@ -1,15 +1,10 @@
package com.tencent.supersonic.headless.core.translator.calcite;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSqlTypeFactoryImpl;
import com.tencent.supersonic.headless.core.translator.calcite.schema.ViewExpanderImpl;
import com.tencent.supersonic.headless.core.utils.SqlDialectFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.avatica.util.Quoting;
import org.apache.calcite.config.CalciteConnectionConfig;
@@ -41,15 +36,20 @@ import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
/**
* global configuration of the calcite
*/
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
/** global configuration of the calcite */
public class Configuration {
public static Properties configProperties = new Properties();
public static RelDataTypeFactory typeFactory = new SemanticSqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
public static RelDataTypeFactory typeFactory =
new SemanticSqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
public static SqlOperatorTable operatorTable = SqlStdOperatorTable.instance();
public static CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
public static CalciteConnectionConfig config =
new CalciteConnectionConfigImpl(configProperties);
public static SqlValidator.Config getValidatorConfig(EngineType engineType) {
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
@@ -60,10 +60,12 @@ public class Configuration {
}
static {
configProperties.put(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString());
configProperties.put(CalciteConnectionProperty.UNQUOTED_CASING.camelName(), Casing.UNCHANGED.toString());
configProperties.put(CalciteConnectionProperty.QUOTED_CASING.camelName(), Casing.TO_LOWER.toString());
configProperties.put(
CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString());
configProperties.put(
CalciteConnectionProperty.UNQUOTED_CASING.camelName(), Casing.UNCHANGED.toString());
configProperties.put(
CalciteConnectionProperty.QUOTED_CASING.camelName(), Casing.TO_LOWER.toString());
}
public static SqlParser.Config getParserConfig(EngineType engineType) {
@@ -76,7 +78,9 @@ public class Configuration {
parserConfig.setQuotedCasing(config.quotedCasing());
parserConfig.setConformance(config.conformance());
parserConfig.setLex(Lex.BIG_QUERY);
parserConfig.setParserFactory(SqlParserImpl.FACTORY).setCaseSensitive(false)
parserConfig
.setParserFactory(SqlParserImpl.FACTORY)
.setCaseSensitive(false)
.setIdentifierMaxLength(Integer.MAX_VALUE)
.setQuoting(Quoting.BACK_TICK)
.setQuoting(Quoting.SINGLE_QUOTE)
@@ -93,22 +97,31 @@ public class Configuration {
List<SqlOperatorTable> tables = new ArrayList<>();
tables.add(SqlStdOperatorTable.instance());
SqlOperatorTable operatorTable = new ChainedSqlOperatorTable(tables);
//operatorTable.
Prepare.CatalogReader catalogReader = new CalciteCatalogReader(
rootSchema,
Collections.singletonList(rootSchema.getName()),
// operatorTable.
Prepare.CatalogReader catalogReader =
new CalciteCatalogReader(
rootSchema,
Collections.singletonList(rootSchema.getName()),
typeFactory,
config);
return SqlValidatorUtil.newValidator(
operatorTable,
catalogReader,
typeFactory,
config
);
return SqlValidatorUtil.newValidator(operatorTable, catalogReader, typeFactory,
Configuration.getValidatorConfig(engineType));
}
public static SqlValidatorWithHints getSqlValidatorWithHints(CalciteSchema rootSchema, EngineType engineTyp) {
return new SqlAdvisorValidator(SqlStdOperatorTable.instance(),
new CalciteCatalogReader(rootSchema,
Collections.singletonList(rootSchema.getName()), typeFactory, config),
typeFactory, SqlValidator.Config.DEFAULT);
public static SqlValidatorWithHints getSqlValidatorWithHints(
CalciteSchema rootSchema, EngineType engineTyp) {
return new SqlAdvisorValidator(
SqlStdOperatorTable.instance(),
new CalciteCatalogReader(
rootSchema,
Collections.singletonList(rootSchema.getName()),
typeFactory,
config),
typeFactory,
SqlValidator.Config.DEFAULT);
}
public static SqlToRelConverter.Config getConverterConfig() {
@@ -120,22 +133,29 @@ public class Configuration {
.addRelBuilderConfigTransform(c -> c.withSimplify(false));
}
public static SqlToRelConverter getSqlToRelConverter(SqlValidatorScope scope, SqlValidator sqlValidator,
RelOptPlanner relOptPlanner, EngineType engineType) {
public static SqlToRelConverter getSqlToRelConverter(
SqlValidatorScope scope,
SqlValidator sqlValidator,
RelOptPlanner relOptPlanner,
EngineType engineType) {
RexBuilder rexBuilder = new RexBuilder(typeFactory);
RelOptCluster cluster = RelOptCluster.create(relOptPlanner, rexBuilder);
FrameworkConfig fromworkConfig = Frameworks.newConfigBuilder()
.parserConfig(getParserConfig(engineType))
.defaultSchema(scope.getValidator().getCatalogReader().getRootSchema().plus())
.build();
return new SqlToRelConverter(new ViewExpanderImpl(),
FrameworkConfig fromworkConfig =
Frameworks.newConfigBuilder()
.parserConfig(getParserConfig(engineType))
.defaultSchema(
scope.getValidator().getCatalogReader().getRootSchema().plus())
.build();
return new SqlToRelConverter(
new ViewExpanderImpl(),
sqlValidator,
(CatalogReader) scope.getValidator().getCatalogReader(), cluster, fromworkConfig.getConvertletTable(),
(CatalogReader) scope.getValidator().getCatalogReader(),
cluster,
fromworkConfig.getConvertletTable(),
getConverterConfig());
}
public static SqlAdvisor getSqlAdvisor(SqlValidatorWithHints validator, EngineType engineType) {
return new SqlAdvisor(validator, getParserConfig(engineType));
}
}


@@ -1,23 +1,27 @@
package com.tencent.supersonic.headless.core.translator.calcite.planner;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.OutputRender;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -25,14 +29,7 @@ import java.util.ListIterator;
import java.util.Objects;
import java.util.Stack;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
* parsing from query dimensions and metrics
*/
/** parsing from query dimensions and metrics */
@Slf4j
public class AggPlanner implements Planner {
@@ -71,15 +68,15 @@ public class AggPlanner implements Planner {
Renderer renderer = it.next();
if (previous != null) {
previous.render(metricReq, datasource, scope, schema, !isAgg);
renderer.setTable(previous.builderAs(DataSourceNode.getNames(datasource) + "_" + String.valueOf(i)));
renderer.setTable(
previous.builderAs(
DataSourceNode.getNames(datasource) + "_" + String.valueOf(i)));
i++;
}
previous = renderer;
}
builders.getLast().render(metricReq, datasource, scope, schema, !isAgg);
parserNode = builders.getLast().builder();
}
private List<DataSource> getMatchDataSource(SqlValidatorScope scope) throws Exception {
@@ -91,8 +88,10 @@ public class AggPlanner implements Planner {
return AggOption.isAgg(aggOption);
}
// default by dataSource time aggregation
if (Objects.nonNull(dataSource.getAggTime()) && !dataSource.getAggTime().equalsIgnoreCase(
Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (Objects.nonNull(dataSource.getAggTime())
&& !dataSource
.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!metricReq.isNativeQuery()) {
return true;
}
@@ -137,7 +136,8 @@ public class AggPlanner implements Planner {
}
public void optimize(EngineType engineType) {
if (Objects.isNull(schema.getRuntimeOptions()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
if (Objects.isNull(schema.getRuntimeOptions())
|| Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) {
return;
}
@@ -149,9 +149,11 @@ public class AggPlanner implements Planner {
public String optimize(String sql, EngineType engineType) {
try {
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
SqlNode sqlNode =
SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) {
return SemanticNode.getSql(SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType);
return SemanticNode.getSql(
SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType);
}
} catch (Exception e) {
log.error("optimize error", e);
@@ -161,7 +163,8 @@ public class AggPlanner implements Planner {
private SqlNode optimizeSql(String sql, EngineType engineType) {
try {
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
SqlNode sqlNode =
SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) {
return SemanticNode.optimize(scope, schema, sqlNode, engineType);
}
@@ -170,4 +173,4 @@ public class AggPlanner implements Planner {
}
return null;
}
}
}
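The render loop in the hunk above chains the renderers: each stage renders against the previous stage's output, which is wrapped under a datasource-names_i alias as the next stage's input table, and the last renderer yields the final parser node. A minimal standalone sketch of that chaining pattern, using hypothetical names rather than the real Renderer API:

import java.util.LinkedList;

public class RenderChainSketch {
    interface Stage {
        String render(String inputTable); // the SQL this stage builds over its input
    }

    public static void main(String[] args) {
        LinkedList<Stage> stages = new LinkedList<>();
        stages.add(in -> "select * from " + in);                  // SourceRender analogue
        stages.add(in -> "select * from " + in + " where 1 = 1"); // FilterRender analogue

        String table = "src";
        int i = 0;
        for (Stage stage : stages) {
            // each stage consumes the previous stage's aliased output
            table = "(" + stage.render(table) + ") src_" + i++;
        }
        System.out.println(table); // the last stage's output is the final node
    }
}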

View File

@@ -1,13 +1,10 @@
package com.tencent.supersonic.headless.core.translator.calcite.planner;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* parse and generate SQL and other execute information
*/
/** Parses the query and generates SQL and other execution information */
public interface Planner {
public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception;

View File

@@ -18,5 +18,4 @@ public class Constants {
public static final String SQL_PARSER_DB = "parsed_db";
public static final String SQL_PARSER_FIELD = "parsed_field";
public static final String DIMENSION_DELIMITER = "dimension_delimiter";
}

View File

@@ -5,7 +5,6 @@ import lombok.Data;
import java.util.List;
@Data
@Builder
public class DataSource {

View File

@@ -3,7 +3,6 @@ package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import java.util.Arrays;
public enum DataType {
ARRAY("ARRAY"),
MAP("MAP"),
@@ -52,4 +51,4 @@ public enum DataType {
public boolean isArray() {
return ARRAY.equals(this);
}
}
}

View File

@@ -1,14 +1,11 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticItem;
import java.util.List;
import java.util.Map;
import lombok.Builder;
import lombok.Data;
import java.util.List;
import java.util.Map;
@Data
@Builder

View File

@@ -2,7 +2,6 @@ package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import lombok.Data;
@Data
public class DimensionTimeTypeParams {

View File

@@ -4,14 +4,14 @@ import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Identify {
public enum Type {
PRIMARY, FOREIGN
PRIMARY,
FOREIGN
}
private String name;

View File

@@ -1,10 +1,11 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import java.util.List;
import lombok.Builder;
import lombok.Data;
import org.apache.commons.lang3.tuple.Triple;
import java.util.List;
@Data
@Builder
public class JoinRelation {
@@ -13,5 +14,4 @@ public class JoinRelation {
private String right;
private String joinType;
private List<Triple<String, String, String>> joinCondition;
}

View File

@@ -1,9 +1,10 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
import lombok.Builder;
import lombok.Data;
@Data
@Builder
@@ -11,10 +12,8 @@ public class Materialization {
public enum TimePartType {
/**
* partition time type
* 1 - FULL, not use partition
* 2 - PARTITION , use time list
* 3 - ZIPPER, use [startDate, endDate] range time
* Partition time type: FULL - no partition; PARTITION - uses a time list; ZIPPER - uses a
* [startDate, endDate] time range
*/
FULL("FULL"),
PARTITION("PARTITION"),
@@ -46,6 +45,4 @@ public class Materialization {
private Integer level;
private List<MaterializationElement> dimensions = new ArrayList<>();
private List<MaterializationElement> metrics = new ArrayList<>();
}

View File

@@ -1,9 +1,10 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import java.util.List;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
@Builder
public class MaterializationElement {

View File

@@ -5,7 +5,6 @@ import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@@ -14,7 +13,7 @@ public class Measure {
private String name;
//sum max min avg count distinct
// Supported aggregations: sum, max, min, avg, count, distinct
private String agg;
private String expr;

View File

@@ -1,11 +1,9 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticItem;
import java.util.List;
import lombok.Data;
import java.util.List;
@Data
public class Metric implements SemanticItem {

View File

@@ -1,8 +1,9 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import java.util.List;
import lombok.Data;
import java.util.List;
@Data
public class MetricTypeParams {
@@ -11,5 +12,4 @@ public class MetricTypeParams {
private List<Measure> fields;
private boolean isFieldMetric = false;
private String expr;
}

View File

@@ -22,11 +22,13 @@ public class SemanticModel {
private Database database;
public List<Dimension> getDimensions() {
return dimensionMap.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
return dimensionMap.values().stream()
.flatMap(Collection::stream)
.collect(Collectors.toList());
}
public Map<Long, DataSource> getModelMap() {
return datasourceMap.values().stream().collect(Collectors.toMap(DataSource::getId, dataSource -> dataSource));
return datasourceMap.values().stream()
.collect(Collectors.toMap(DataSource::getId, dataSource -> dataSource));
}
}

View File

@@ -1,8 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.DataContext;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.plan.RelOptTable;
@@ -22,9 +19,10 @@ import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.sql.type.SqlTypeName;
/**
* customize the AbstractTable
*/
import java.util.ArrayList;
import java.util.List;
/** customize the AbstractTable */
public class DataSourceTable extends AbstractTable implements ScannableTable, TranslatableTable {
private final String tableName;
@@ -34,7 +32,10 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
private RelDataType rowType;
private DataSourceTable(String tableName, List<String> fieldNames, List<SqlTypeName> fieldTypes,
private DataSourceTable(
String tableName,
List<String> fieldNames,
List<SqlTypeName> fieldTypes,
Statistic statistic) {
this.tableName = tableName;
this.fieldNames = fieldNames;
@@ -79,10 +80,10 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
public RelNode toRel(RelOptTable.ToRelContext toRelContext, RelOptTable relOptTable) {
List<RelHint> hint = new ArrayList<>();
return new LogicalTableScan(toRelContext.getCluster(), toRelContext.getCluster().traitSet(), hint, relOptTable);
return new LogicalTableScan(
toRelContext.getCluster(), toRelContext.getCluster().traitSet(), hint, relOptTable);
}
public static final class Builder {
private final String tableName;
@@ -127,8 +128,8 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
throw new IllegalStateException("Table must have positive row count");
}
return new DataSourceTable(tableName, fieldNames, fieldTypes, Statistics.of(rowCount, null));
return new DataSourceTable(
tableName, fieldNames, fieldTypes, Statistics.of(rowCount, null));
}
}
}

View File

@@ -9,5 +9,4 @@ import org.apache.commons.lang3.tuple.Triple;
public class RuntimeOptions {
private Triple<String, String, String> minMaxTime;
private Boolean enableOptimize;
}

View File

@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2SQLSqlValidatorImpl;
@@ -32,38 +31,50 @@ public class SchemaBuilder {
Map<String, RelDataType> nameToTypeMap = new HashMap<>();
CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
rootSchema.add(schema.getSchemaKey(), schema);
Prepare.CatalogReader catalogReader = new CalciteCatalogReader(
rootSchema,
Collections.singletonList(schema.getSchemaKey()),
Configuration.typeFactory,
Configuration.config
);
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
S2SQLSqlValidatorImpl s2SQLSqlValidator = new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.getValidatorConfig(engineType));
Prepare.CatalogReader catalogReader =
new CalciteCatalogReader(
rootSchema,
Collections.singletonList(schema.getSchemaKey()),
Configuration.typeFactory,
Configuration.config);
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
S2SQLSqlValidatorImpl s2SQLSqlValidator =
new S2SQLSqlValidatorImpl(
Configuration.operatorTable,
catalogReader,
Configuration.typeFactory,
Configuration.getValidatorConfig(engineType));
return new ParameterScope(s2SQLSqlValidator, nameToTypeMap);
}
public static CalciteSchema getMaterializationSchema() {
CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
SchemaPlus schema = rootSchema.plus().add(MATERIALIZATION_SYS_DB, new AbstractSchema());
DataSourceTable srcTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_SOURCE)
.addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
.addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT)
.withRowCount(1)
.build();
DataSourceTable srcTable =
DataSourceTable.newBuilder(MATERIALIZATION_SYS_SOURCE)
.addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
.addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT)
.withRowCount(1)
.build();
schema.add(MATERIALIZATION_SYS_SOURCE, srcTable);
DataSourceTable dataSetTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_VIEW)
.addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
.addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT)
.withRowCount(1)
.build();
DataSourceTable dataSetTable =
DataSourceTable.newBuilder(MATERIALIZATION_SYS_VIEW)
.addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
.addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT)
.withRowCount(1)
.build();
schema.add(MATERIALIZATION_SYS_VIEW, dataSetTable);
return rootSchema;
}
public static void addSourceView(CalciteSchema dataSetSchema, String dbSrc, String tbSrc, Set<String> dates,
Set<String> dimensions, Set<String> metrics) {
public static void addSourceView(
CalciteSchema dataSetSchema,
String dbSrc,
String tbSrc,
Set<String> dates,
Set<String> dimensions,
Set<String> metrics) {
String tb = tbSrc;
String db = dbSrc;
DataSourceTable.Builder builder = DataSourceTable.newBuilder(tb);
@@ -76,9 +87,7 @@ public class SchemaBuilder {
for (String metric : metrics) {
builder.addField(metric, SqlTypeName.ANY);
}
DataSourceTable srcTable = builder
.withRowCount(1)
.build();
DataSourceTable srcTable = builder.withRowCount(1).build();
if (Objects.nonNull(db) && !db.isEmpty()) {
SchemaPlus dbPs = dataSetSchema.plus();
for (String d : db.split("\\.")) {

View File

@@ -1,12 +1,11 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.Table;
@@ -16,7 +15,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SemanticSchema extends AbstractSchema {
private final String schemaKey;
@@ -28,7 +26,6 @@ public class SemanticSchema extends AbstractSchema {
private RuntimeOptions runtimeOptions;
private SemanticSchema(String schemaKey, Map<String, Table> tableMap) {
this.schemaKey = schemaKey;
this.tableMap = tableMap;
@@ -123,7 +120,8 @@ public class SemanticSchema extends AbstractSchema {
public Builder addTable(DataSourceTable table) {
if (tableMap.containsKey(table.getTableName())) {
throw new IllegalArgumentException("Table already defined: " + table.getTableName());
throw new IllegalArgumentException(
"Table already defined: " + table.getTableName());
}
tableMap.put(table.getTableName(), table);
@@ -135,5 +133,4 @@ public class SemanticSchema extends AbstractSchema {
return new SemanticSchema(schemaKey, tableMap);
}
}
}

View File

@@ -4,9 +4,7 @@ import org.apache.calcite.sql.fun.SqlLibrary;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlConformanceEnum;
/**
* customize the SqlConformance
*/
/** customize the SqlConformance */
public class SemanticSqlConformance implements SqlConformance {
@Override
@@ -163,4 +161,4 @@ public class SemanticSqlConformance implements SqlConformance {
public boolean allowLenientCoercion() {
return false;
}
}
}

View File

@@ -8,9 +8,7 @@ import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.validate.SqlConformance;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* customize the SqlDialect
*/
/** customize the SqlDialect */
public class SemanticSqlDialect extends SqlDialect {
private static final SqlConformance tagTdwSqlConformance = new SemanticSqlConformance();
@@ -19,7 +17,8 @@ public class SemanticSqlDialect extends SqlDialect {
super(context);
}
public static void unparseFetchUsingAnsi(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
public static void unparseFetchUsingAnsi(
SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
Preconditions.checkArgument(fetch != null || offset != null);
SqlWriter.Frame fetchFrame;
writer.newlineAndIndent();
@@ -39,7 +38,6 @@ public class SemanticSqlDialect extends SqlDialect {
}
writer.endList(fetchFrame);
}
@Override
@@ -76,10 +74,11 @@ public class SemanticSqlDialect extends SqlDialect {
return true;
}
public void unparseSqlIntervalLiteral(SqlWriter writer, SqlIntervalLiteral literal, int leftPrec, int rightPrec) {
}
public void unparseSqlIntervalLiteral(
SqlWriter writer, SqlIntervalLiteral literal, int leftPrec, int rightPrec) {}
public void unparseOffsetFetch(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
public void unparseOffsetFetch(
SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
unparseFetchUsingAnsi(writer, offset, fetch);
}
}
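For reference, unparseFetchUsingAnsi above writes the ANSI pagination clauses, each preceded by a newline. A hedged sketch of the emitted shape for offset=10 and fetch=5 (values hypothetical):

String ansiFetch = "OFFSET 10 ROWS\n" + "FETCH NEXT 5 ROWS ONLY"; // assumed rendering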

View File

@@ -1,12 +1,11 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema;
import java.nio.charset.Charset;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
/**
* customize the SqlTypeFactoryImpl
*/
import java.nio.charset.Charset;
/** customize the SqlTypeFactoryImpl */
public class SemanticSqlTypeFactoryImpl extends SqlTypeFactoryImpl {
public SemanticSqlTypeFactoryImpl(RelDataTypeSystem typeSystem) {

View File

@@ -6,16 +6,16 @@ import org.apache.calcite.rel.type.RelDataType;
import java.util.List;
/**
* customize the ViewExpander
*/
/** customize the ViewExpander */
public class ViewExpanderImpl implements RelOptTable.ViewExpander {
public ViewExpanderImpl() {
}
public ViewExpanderImpl() {}
@Override
public RelRoot expandView(RelDataType rowType, String queryString, List<String> schemaPath,
public RelRoot expandView(
RelDataType rowType,
String queryString,
List<String> schemaPath,
List<String> dataSetPath) {
return null;
}
}
}

View File

@@ -1,9 +1,7 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
public interface Optimization {
public void visit(SemanticSchema semanticSchema);

View File

@@ -1,8 +1,8 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
@@ -11,45 +11,56 @@ import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSc
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
* process TableView
*/
/** Processes a TableView */
@Data
public abstract class Renderer {
protected TableView tableView = new TableView();
public static Optional<Dimension> getDimensionByName(String name, DataSource datasource) {
return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
return datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name))
.findFirst();
}
public static Optional<Measure> getMeasureByName(String name, DataSource datasource) {
return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name)).findFirst();
return datasource.getMeasures().stream()
.filter(mm -> mm.getName().equalsIgnoreCase(name))
.findFirst();
}
public static Optional<Metric> getMetricByName(String name, SemanticSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(name))
.findFirst();
Optional<Metric> metric =
schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name))
.findFirst();
return metric;
}
public static Optional<Identify> getIdentifyByName(String name, DataSource datasource) {
return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
return datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
}
public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg, String alias) throws Exception {
public static MetricNode buildMetricNode(
String metric,
DataSource datasource,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg,
String alias)
throws Exception {
Optional<Metric> metricOpt = getMetricByName(metric, schema);
MetricNode metricNode = new MetricNode();
EngineType engineType = EngineType.fromString(datasource.getType());
@@ -58,37 +69,61 @@ public abstract class Renderer {
for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
Optional<Measure> measure = getMeasureByName(m.getName(), datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode()
.put(measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode()
.put(measure.get().getName(),
metricNode
.getNonAggNode()
.put(
measure.get().getName(),
MeasureNode.buildNonAgg(
alias, measure.get(), scope, engineType));
metricNode
.getAggNode()
.put(
measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
metricNode
.getAggFunction()
.put(measure.get().getName(), measure.get().getAgg());
} else {
metricNode.getNonAggNode().put(m.getName(), MeasureNode.buildNonAgg(alias, m, scope, engineType));
metricNode.getAggNode().put(m.getName(), MeasureNode.buildAgg(m, nonAgg, scope, engineType));
metricNode
.getNonAggNode()
.put(m.getName(), MeasureNode.buildNonAgg(alias, m, scope, engineType));
metricNode
.getAggNode()
.put(m.getName(), MeasureNode.buildAgg(m, nonAgg, scope, engineType));
metricNode.getAggFunction().put(m.getName(), m.getAgg());
}
if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
metricNode.getMeasureFilter()
.put(m.getName(), SemanticNode.parse(m.getConstraint(), scope, engineType));
metricNode
.getMeasureFilter()
.put(
m.getName(),
SemanticNode.parse(m.getConstraint(), scope, engineType));
}
}
return metricNode;
}
Optional<Measure> measure = getMeasureByName(metric, datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode()
.put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode()
.put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode
.getNonAggNode()
.put(
measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode
.getAggNode()
.put(
measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) {
metricNode.getMeasureFilter().put(measure.get().getName(),
SemanticNode.parse(measure.get().getConstraint(), scope, engineType));
metricNode
.getMeasureFilter()
.put(
measure.get().getName(),
SemanticNode.parse(
measure.get().getConstraint(), scope, engineType));
}
}
return metricNode;
@@ -111,6 +146,11 @@ public abstract class Renderer {
return SemanticNode.buildAs(alias, tableView.build());
}
public abstract void render(MetricQueryParam metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception;
public abstract void render(
MetricQueryParam metricCommand,
List<DataSource> dataSources,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg)
throws Exception;
}

View File

@@ -5,13 +5,14 @@ import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
import org.apache.calcite.sql.validate.SqlValidatorImpl;
/**
* customize the SqlValidatorImpl
*/
/** customize the SqlValidatorImpl */
public class S2SQLSqlValidatorImpl extends SqlValidatorImpl {
public S2SQLSqlValidatorImpl(SqlOperatorTable opTab, SqlValidatorCatalogReader catalogReader,
RelDataTypeFactory typeFactory, Config config) {
public S2SQLSqlValidatorImpl(
SqlOperatorTable opTab,
SqlValidatorCatalogReader catalogReader,
RelDataTypeFactory typeFactory,
Config config) {
super(opTab, catalogReader, typeFactory, config);
}
}

View File

@@ -1,9 +1,6 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlKind;
@@ -12,9 +9,11 @@ import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.parser.SqlParserPos;
/**
* basic query project
*/
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/** A basic query projection */
@Data
public class TableView {
@@ -40,15 +39,29 @@ public class TableView {
if (filter.size() > 0) {
filterNodeList = new SqlNodeList(filter, SqlParserPos.ZERO);
}
return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(measure, SqlParserPos.ZERO), table,
filterNodeList, dimensionNodeList, null, null, null, order, offset, fetch, null);
return new SqlSelect(
SqlParserPos.ZERO,
null,
new SqlNodeList(measure, SqlParserPos.ZERO),
table,
filterNodeList,
dimensionNodeList,
null,
null,
null,
order,
offset,
fetch,
null);
}
private List<SqlNode> getGroup(List<SqlNode> sqlNodeList) {
return sqlNodeList.stream()
.map(s -> (s.getKind().equals(SqlKind.AS) ? ((SqlBasicCall) s).getOperandList().get(0) : s))
.collect(
Collectors.toList());
.map(
s ->
(s.getKind().equals(SqlKind.AS)
? ((SqlBasicCall) s).getOperandList().get(0)
: s))
.collect(Collectors.toList());
}
}
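A hedged, string-level analogue of getGroup() above (the real code inspects SqlNode kinds and takes operand 0 of an AS call): an aliased select item contributes its underlying expression, not its alias, to the GROUP BY list.

// Hypothetical select item "toDate(ts) AS day" groups by "toDate(ts)":
String selectItem = "toDate(ts) AS day";
String groupKey = selectItem.contains(" AS ") ? selectItem.split(" AS ")[0] : selectItem;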

View File

@@ -1,19 +1,28 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import java.util.Objects;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.Objects;
public class AggFunctionNode extends SemanticNode {
public static SqlNode build(String agg, String name, SqlValidatorScope scope, EngineType engineType)
public static SqlNode build(
String agg, String name, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if (Objects.isNull(agg) || agg.isEmpty()) {
return parse(name, scope, engineType);
}
if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ", scope,
return parse(
AggFunction.COUNT.name()
+ " ( "
+ AggFunction.DISTINCT.name()
+ " "
+ name
+ " ) ",
scope,
engineType);
}
return parse(agg + " ( " + name + " ) ", scope, engineType);
@@ -28,6 +37,4 @@ public class AggFunctionNode extends SemanticNode {
COUNT,
DISTINCT
}
}
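A worked example of the COUNT_DISTINCT branch above, with hypothetical inputs: the pseudo-aggregation name is rewritten into a standard COUNT(DISTINCT col) expression before SemanticNode.parse turns it into a SqlNode.

String agg = "count_distinct"; // hypothetical aggregation name
String name = "user_id";
// mirrors the string assembled in AggFunctionNode.build:
String rewritten = "COUNT" + " ( " + "DISTINCT" + " " + name + " ) ";
// rewritten -> "COUNT ( DISTINCT user_id ) "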

View File

@@ -3,7 +3,10 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
@@ -11,9 +14,17 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.extend.LateralViewExplodeNode;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
@@ -26,17 +37,6 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
@Slf4j
public class DataSourceNode extends SemanticNode {
@@ -46,8 +46,9 @@ public class DataSourceNode extends SemanticNode {
sqlTable = datasource.getSqlQuery();
} else if (datasource.getTableQuery() != null && !datasource.getTableQuery().isEmpty()) {
if (datasource.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
String fullTableName = Arrays.stream(datasource.getTableQuery().split("\\."))
.collect(Collectors.joining(".public."));
String fullTableName =
Arrays.stream(datasource.getTableQuery().split("\\."))
.collect(Collectors.joining(".public."));
sqlTable = "select * from " + fullTableName;
} else {
sqlTable = "select * from " + datasource.getTableQuery();
@@ -61,7 +62,8 @@ public class DataSourceNode extends SemanticNode {
return buildAs(datasource.getName(), source);
}
private static void addSchema(SqlValidatorScope scope, DataSource datasource, String table) throws Exception {
private static void addSchema(SqlValidatorScope scope, DataSource datasource, String table)
throws Exception {
Map<String, Set<String>> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table);
for (Map.Entry<String, Set<String>> entry : sqlTable.entrySet()) {
String tb = entry.getKey();
@@ -74,7 +76,11 @@ public class DataSourceNode extends SemanticNode {
}
}
private static void addSchemaTable(SqlValidatorScope scope, DataSource datasource, String db, String tb,
private static void addSchemaTable(
SqlValidatorScope scope,
DataSource datasource,
String db,
String tb,
Set<String> fields)
throws Exception {
Set<String> dateInfo = new HashSet<>();
@@ -82,7 +88,8 @@ public class DataSourceNode extends SemanticNode {
Set<String> metrics = new HashSet<>();
EngineType engineType = EngineType.fromString(datasource.getType());
for (Dimension d : datasource.getDimensions()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
List<SqlNode> identifiers =
expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName());
}
@@ -90,13 +97,15 @@ public class DataSourceNode extends SemanticNode {
dimensions.add(i.getName());
}
for (Measure m : datasource.getMeasures()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> {
if (!dimensions.contains(i.toString())) {
metrics.add(i.toString());
}
}
);
List<SqlNode> identifiers =
expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.stream()
.forEach(
i -> {
if (!dimensions.contains(i.toString())) {
metrics.add(i.toString());
}
});
if (!dimensions.contains(m.getName())) {
metrics.add(m.getName());
}
@@ -107,37 +116,52 @@ public class DataSourceNode extends SemanticNode {
log.info("add column {} {}", datasource.getName(), field);
}
}
SchemaBuilder.addSourceView(scope.getValidator().getCatalogReader().getRootSchema(), db,
tb, dateInfo,
dimensions, metrics);
SchemaBuilder.addSourceView(
scope.getValidator().getCatalogReader().getRootSchema(),
db,
tb,
dateInfo,
dimensions,
metrics);
}
public static SqlNode buildExtend(DataSource datasource, Map<String, String> exprList,
SqlValidatorScope scope)
public static SqlNode buildExtend(
DataSource datasource, Map<String, String> exprList, SqlValidatorScope scope)
throws Exception {
if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope);
}
EngineType engineType = EngineType.fromString(datasource.getType());
SqlNode dataSet = new SqlBasicCall(new LateralViewExplodeNode(exprList), Arrays.asList(build(datasource, scope),
new SqlNodeList(getExtendField(exprList, scope, engineType), SqlParserPos.ZERO)), SqlParserPos.ZERO);
SqlNode dataSet =
new SqlBasicCall(
new LateralViewExplodeNode(exprList),
Arrays.asList(
build(datasource, scope),
new SqlNodeList(
getExtendField(exprList, scope, engineType),
SqlParserPos.ZERO)),
SqlParserPos.ZERO);
return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, dataSet);
}
public static List<SqlNode> getExtendField(Map<String, String> exprList, SqlValidatorScope scope,
EngineType engineType)
public static List<SqlNode> getExtendField(
Map<String, String> exprList, SqlValidatorScope scope, EngineType engineType)
throws Exception {
List<SqlNode> sqlNodeList = new ArrayList<>();
for (String expr : exprList.keySet()) {
sqlNodeList.add(parse(expr, scope, engineType));
sqlNodeList.add(new SqlDataTypeSpec(
new SqlUserDefinedTypeNameSpec(expr + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, SqlParserPos.ZERO),
SqlParserPos.ZERO));
sqlNodeList.add(
new SqlDataTypeSpec(
new SqlUserDefinedTypeNameSpec(
expr + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX,
SqlParserPos.ZERO),
SqlParserPos.ZERO));
}
return sqlNodeList;
}
private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope, EngineType engineType) throws Exception {
private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig(engineType));
SqlNode sqlNode = sqlParser.parseQuery();
scope.validateExpr(sqlNode);
@@ -148,33 +172,57 @@ public class DataSourceNode extends SemanticNode {
return dataSourceList.stream().map(d -> d.getName()).collect(Collectors.joining("_"));
}
public static void getQueryDimensionMeasure(SemanticSchema schema, MetricQueryParam metricCommand,
Set<String> queryDimension, List<String> measures) {
queryDimension.addAll(metricCommand.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] : d)
.collect(Collectors.toSet()));
Set<String> schemaMetricName = schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
schema.getMetrics().stream().filter(m -> metricCommand.getMetrics().contains(m.getName()))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream().forEach(mm -> measures.add(mm.getName())));
metricCommand.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)).forEach(m -> measures.add(m));
public static void getQueryDimensionMeasure(
SemanticSchema schema,
MetricQueryParam metricCommand,
Set<String> queryDimension,
List<String> measures) {
queryDimension.addAll(
metricCommand.getDimensions().stream()
.map(
d ->
d.contains(Constants.DIMENSION_IDENTIFY)
? d.split(Constants.DIMENSION_IDENTIFY)[1]
: d)
.collect(Collectors.toSet()));
Set<String> schemaMetricName =
schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
schema.getMetrics().stream()
.filter(m -> metricCommand.getMetrics().contains(m.getName()))
.forEach(
m ->
m.getMetricTypeParams().getMeasures().stream()
.forEach(mm -> measures.add(mm.getName())));
metricCommand.getMetrics().stream()
.filter(m -> !schemaMetricName.contains(m))
.forEach(m -> measures.add(m));
}
public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, MetricQueryParam metricCommand,
Set<String> queryDimension, List<String> measures,
SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
public static void mergeQueryFilterDimensionMeasure(
SemanticSchema schema,
MetricQueryParam metricCommand,
Set<String> queryDimension,
List<String> measures,
SqlValidatorScope scope)
throws Exception {
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType), filterConditions);
FilterNode.getFilterField(
parse(metricCommand.getWhere(), scope, engineType), filterConditions);
Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName = schema.getMetrics().stream()
.map(m -> m.getName()).collect(Collectors.toSet());
Set<String> schemaMetricName =
schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
for (String filterCondition : filterConditions) {
if (schemaMetricName.contains(filterCondition)) {
schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream()
.forEach(mm -> queryMeasures.add(mm.getName())));
schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(
m ->
m.getMetricTypeParams().getMeasures().stream()
.forEach(
mm -> queryMeasures.add(mm.getName())));
continue;
}
queryDimension.add(filterCondition);
@@ -184,8 +232,9 @@ public class DataSourceNode extends SemanticNode {
}
}
public static List<DataSource> getMatchDataSources(SqlValidatorScope scope, SemanticSchema schema,
MetricQueryParam metricCommand) throws Exception {
public static List<DataSource> getMatchDataSources(
SqlValidatorScope scope, SemanticSchema schema, MetricQueryParam metricCommand)
throws Exception {
List<DataSource> dataSources = new ArrayList<>();
// check by metric
@@ -196,14 +245,18 @@ public class DataSourceNode extends SemanticNode {
// first, match by measure count
Map<String, Integer> dataSourceMeasures = new HashMap<>();
for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) {
Set<String> sourceMeasure = entry.getValue().getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
Set<String> sourceMeasure =
entry.getValue().getMeasures().stream()
.map(mm -> mm.getName())
.collect(Collectors.toSet());
sourceMeasure.retainAll(measures);
dataSourceMeasures.put(entry.getKey(), sourceMeasure.size());
}
log.info("dataSourceMeasures [{}]", dataSourceMeasures);
Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
Optional<Map.Entry<String, Integer>> base =
dataSourceMeasures.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
.findFirst();
if (base.isPresent()) {
baseDataSource = schema.getDatasource().get(base.get().getKey());
dataSources.add(baseDataSource);
@@ -211,57 +264,79 @@ public class DataSourceNode extends SemanticNode {
// second, check whether all dimensions and metrics match
if (baseDataSource != null) {
Set<String> filterMeasure = new HashSet<>();
Set<String> sourceMeasure = baseDataSource.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
Set<String> dimension = baseDataSource.getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
Set<String> sourceMeasure =
baseDataSource.getMeasures().stream()
.map(mm -> mm.getName())
.collect(Collectors.toSet());
Set<String> dimension =
baseDataSource.getDimensions().stream()
.map(dd -> dd.getName())
.collect(Collectors.toSet());
baseDataSource.getIdentifiers().stream().forEach(i -> dimension.add(i.getName()));
if (schema.getDimension().containsKey(baseDataSource.getName())) {
schema.getDimension().get(baseDataSource.getName()).stream().forEach(d -> dimension.add(d.getName()));
schema.getDimension().get(baseDataSource.getName()).stream()
.forEach(d -> dimension.add(d.getName()));
}
filterMeasure.addAll(sourceMeasure);
filterMeasure.addAll(dimension);
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, scope);
boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope,
engineType);
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
mergeQueryFilterDimensionMeasure(
schema, metricCommand, queryDimension, measures, scope);
boolean isAllMatch =
checkMatch(
sourceMeasure,
queryDimension,
measures,
dimension,
metricCommand,
scope,
engineType);
if (isAllMatch) {
log.debug("baseDataSource match all ");
return dataSources;
}
// find all dataSources that share the same identifiers
List<DataSource> linkDataSources = getLinkDataSourcesByJoinRelation(queryDimension, measures,
baseDataSource, schema);
List<DataSource> linkDataSources =
getLinkDataSourcesByJoinRelation(
queryDimension, measures, baseDataSource, schema);
if (CollectionUtils.isEmpty(linkDataSources)) {
log.debug("baseDataSource get by identifiers ");
Set<String> baseIdentifiers = baseDataSource.getIdentifiers().stream().map(i -> i.getName())
.collect(Collectors.toSet());
Set<String> baseIdentifiers =
baseDataSource.getIdentifiers().stream()
.map(i -> i.getName())
.collect(Collectors.toSet());
if (baseIdentifiers.isEmpty()) {
throw new Exception("datasource error : " + baseDataSource.getName() + " miss identifier");
throw new Exception(
"datasource error : " + baseDataSource.getName() + " miss identifier");
}
linkDataSources = getLinkDataSources(baseIdentifiers, queryDimension, measures,
baseDataSource, schema);
linkDataSources =
getLinkDataSources(
baseIdentifiers, queryDimension, measures, baseDataSource, schema);
if (linkDataSources.isEmpty()) {
throw new Exception(
String.format("not find the match datasource : dimension[%s],measure[%s]", queryDimension,
measures));
String.format(
"not find the match datasource : dimension[%s],measure[%s]",
queryDimension, measures));
}
}
log.debug("linkDataSources {}", linkDataSources);
return linkDataSources;
//dataSources.addAll(linkDataSources);
// dataSources.addAll(linkDataSources);
}
return dataSources;
}
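A hedged, self-contained sketch of the base-datasource selection above (data hypothetical): count, for each datasource, how many of the queried measures it can serve, then take the datasource with the highest count as the base.

import java.util.*;

class BaseSourceSketch {
    public static void main(String[] args) {
        Map<String, Set<String>> sourceMeasures = Map.of(
                "orders", Set.of("pv", "uv"),
                "users", Set.of("uv"));
        List<String> queried = List.of("pv", "uv");

        sourceMeasures.entrySet().stream()
                .max(Comparator.comparingLong(
                        e -> queried.stream().filter(e.getValue()::contains).count()))
                .map(Map.Entry::getKey)
                .ifPresent(System.out::println); // orders
    }
}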
private static boolean checkMatch(Set<String> sourceMeasure,
private static boolean checkMatch(
Set<String> sourceMeasure,
Set<String> queryDimension,
List<String> measures,
Set<String> dimension,
MetricQueryParam metricCommand,
SqlValidatorScope scope,
EngineType engineType) throws Exception {
EngineType engineType)
throws Exception {
boolean isAllMatch = true;
sourceMeasure.retainAll(measures);
if (sourceMeasure.size() < measures.size()) {
@@ -292,8 +367,11 @@ public class DataSourceNode extends SemanticNode {
return isAllMatch;
}
private static List<DataSource> getLinkDataSourcesByJoinRelation(Set<String> queryDimension, List<String> measures,
DataSource baseDataSource, SemanticSchema schema) {
private static List<DataSource> getLinkDataSourcesByJoinRelation(
Set<String> queryDimension,
List<String> measures,
DataSource baseDataSource,
SemanticSchema schema) {
Set<String> linkDataSourceName = new HashSet<>();
List<DataSource> linkDataSources = new ArrayList<>();
Set<String> before = new HashSet<>();
@@ -301,37 +379,49 @@ public class DataSourceNode extends SemanticNode {
if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
Set<Long> visitJoinRelations = new HashSet<>();
List<JoinRelation> sortedJoinRelation = new ArrayList<>();
sortJoinRelation(schema.getJoinRelations(), baseDataSource.getName(), visitJoinRelations,
sortJoinRelation(
schema.getJoinRelations(),
baseDataSource.getName(),
visitJoinRelations,
sortedJoinRelation);
schema.getJoinRelations().stream().filter(j -> !visitJoinRelations.contains(j.getId()))
schema.getJoinRelations().stream()
.filter(j -> !visitJoinRelations.contains(j.getId()))
.forEach(j -> sortedJoinRelation.add(j));
for (JoinRelation joinRelation : sortedJoinRelation) {
if (!before.contains(joinRelation.getLeft()) && !before.contains(joinRelation.getRight())) {
if (!before.contains(joinRelation.getLeft())
&& !before.contains(joinRelation.getRight())) {
continue;
}
boolean isMatch = false;
boolean isRight = before.contains(joinRelation.getLeft());
DataSource other = isRight ? schema.getDatasource().get(joinRelation.getRight())
: schema.getDatasource().get(joinRelation.getLeft());
DataSource other =
isRight
? schema.getDatasource().get(joinRelation.getRight())
: schema.getDatasource().get(joinRelation.getLeft());
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = other.getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
Set<String> linkDimension =
other.getDimensions().stream()
.map(dd -> dd.getName())
.collect(Collectors.toSet());
other.getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
isMatch = true;
}
}
Set<String> linkMeasure = other.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
Set<String> linkMeasure =
other.getMeasures().stream()
.map(mm -> mm.getName())
.collect(Collectors.toSet());
linkMeasure.retainAll(measures);
if (!linkMeasure.isEmpty()) {
isMatch = true;
}
if (!isMatch && schema.getDimension().containsKey(other.getName())) {
Set<String> linkDimension = schema.getDimension().get(other.getName()).stream()
.map(dd -> dd.getName())
.collect(Collectors.toSet());
Set<String> linkDimension =
schema.getDimension().get(other.getName()).stream()
.map(dd -> dd.getName())
.collect(Collectors.toSet());
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
isMatch = true;
@@ -348,27 +438,35 @@ public class DataSourceNode extends SemanticNode {
linkDataSourceName.add(baseDataSource.getName());
orders.put(baseDataSource.getName(), 0L);
for (JoinRelation joinRelation : schema.getJoinRelations()) {
if (linkDataSourceName.contains(joinRelation.getLeft()) && linkDataSourceName.contains(
joinRelation.getRight())) {
if (linkDataSourceName.contains(joinRelation.getLeft())
&& linkDataSourceName.contains(joinRelation.getRight())) {
orders.put(joinRelation.getLeft(), 0L);
orders.put(joinRelation.getRight(), 1L);
}
}
orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> {
linkDataSources.add(schema.getDatasource().get(d.getKey()));
});
orders.entrySet().stream()
.sorted(Map.Entry.comparingByValue())
.forEach(
d -> {
linkDataSources.add(schema.getDatasource().get(d.getKey()));
});
}
return linkDataSources;
}
private static void sortJoinRelation(List<JoinRelation> joinRelations, String next, Set<Long> visited,
private static void sortJoinRelation(
List<JoinRelation> joinRelations,
String next,
Set<Long> visited,
List<JoinRelation> sortedJoins) {
for (JoinRelation link : joinRelations) {
if (!visited.contains(link.getId())) {
if (link.getLeft().equals(next) || link.getRight().equals(next)) {
visited.add(link.getId());
sortedJoins.add(link);
sortJoinRelation(joinRelations, link.getLeft().equals(next) ? link.getRight() : link.getLeft(),
sortJoinRelation(
joinRelations,
link.getLeft().equals(next) ? link.getRight() : link.getLeft(),
visited,
sortedJoins);
}
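sortJoinRelation above is a depth-first walk: starting from the base datasource, it takes each unvisited relation touching the current node, then recurses from the node on the relation's far side, so every ordered join attaches to something already reachable. A hedged standalone sketch (Java 16+ record, hypothetical names):

import java.util.*;

class JoinOrderSketch {
    record Edge(long id, String left, String right) {}

    static void sort(List<Edge> edges, String next, Set<Long> visited, List<Edge> out) {
        for (Edge e : edges) {
            if (!visited.contains(e.id())
                    && (e.left().equals(next) || e.right().equals(next))) {
                visited.add(e.id());
                out.add(e);
                // continue from the datasource on the far side of this relation
                sort(edges, e.left().equals(next) ? e.right() : e.left(), visited, out);
            }
        }
    }

    public static void main(String[] args) {
        List<Edge> edges = List.of(
                new Edge(1, "orders", "users"), new Edge(2, "users", "regions"));
        List<Edge> ordered = new ArrayList<>();
        sort(edges, "orders", new HashSet<>(), ordered);
        System.out.println(ordered.size()); // 2, ordered outward from "orders"
    }
}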
@@ -376,7 +474,8 @@ public class DataSourceNode extends SemanticNode {
}
}
private static List<DataSource> getLinkDataSources(Set<String> baseIdentifiers,
private static List<DataSource> getLinkDataSources(
Set<String> baseIdentifiers,
Set<String> queryDimension,
List<String> measures,
DataSource baseDataSource,
@@ -387,22 +486,30 @@ public class DataSourceNode extends SemanticNode {
if (entry.getKey().equalsIgnoreCase(baseDataSource.getName())) {
continue;
}
Long identifierNum = entry.getValue().getIdentifiers().stream().map(i -> i.getName())
.filter(i -> baseIdentifiers.contains(i)).count();
Long identifierNum =
entry.getValue().getIdentifiers().stream()
.map(i -> i.getName())
.filter(i -> baseIdentifiers.contains(i))
.count();
if (identifierNum > 0) {
boolean isMatch = false;
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
entry.getValue().getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName()));
Set<String> linkDimension =
entry.getValue().getDimensions().stream()
.map(dd -> dd.getName())
.collect(Collectors.toSet());
entry.getValue().getIdentifiers().stream()
.forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
isMatch = true;
}
}
if (!measures.isEmpty()) {
Set<String> linkMeasure = entry.getValue().getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
Set<String> linkMeasure =
entry.getValue().getMeasures().stream()
.map(mm -> mm.getName())
.collect(Collectors.toSet());
linkMeasure.retainAll(measures);
if (!linkMeasure.isEmpty()) {
isMatch = true;
@@ -415,8 +522,10 @@ public class DataSourceNode extends SemanticNode {
}
for (Map.Entry<String, List<Dimension>> entry : schema.getDimension().entrySet()) {
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
Set<String> linkDimension =
entry.getValue().stream()
.map(dd -> dd.getName())
.collect(Collectors.toSet());
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
linkDataSourceName.add(entry.getKey());
@@ -434,4 +543,4 @@ public class DataSourceNode extends SemanticNode {
}
return Lists.newArrayList();
}
}
}
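A hedged illustration of the identifier test in getLinkDataSources above (names hypothetical): a candidate datasource is linkable only if it shares at least one identifier with the base datasource; only then are its dimensions and measures checked against the query.

import java.util.Set;

class LinkCheckSketch {
    public static void main(String[] args) {
        Set<String> baseIdentifiers = Set.of("user_id");
        Set<String> candidateIdentifiers = Set.of("user_id", "order_id");
        long shared = candidateIdentifiers.stream()
                .filter(baseIdentifiers::contains)
                .count();
        System.out.println(shared > 0); // true -> check dimension/measure overlap next
    }
}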

View File

@@ -1,40 +1,40 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.List;
import java.util.Objects;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class DimensionNode extends SemanticNode {
public static SqlNode build(Dimension dimension, SqlValidatorScope scope, EngineType engineType) throws Exception {
public static SqlNode build(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return buildAs(dimension.getName(), sqlNode);
}
public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
public static List<SqlNode> expand(
Dimension dimension, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return expand(sqlNode, scope);
}
public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
public static SqlNode buildName(
Dimension dimension, SqlValidatorScope scope, EngineType engineType) throws Exception {
return parse(dimension.getName(), scope, engineType);
}
public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
public static SqlNode buildExp(
Dimension dimension, SqlValidatorScope scope, EngineType engineType) throws Exception {
return parse(dimension.getExpr(), scope, engineType);
}
public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope, EngineType engineType)
public static SqlNode buildNameAs(
String alias, Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if ("".equals(alias)) {
return buildName(dimension, scope, engineType);
@@ -43,17 +43,20 @@ public class DimensionNode extends SemanticNode {
return buildAs(alias, sqlNode);
}
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
public static SqlNode buildArray(
Dimension dimension, SqlValidatorScope scope, EngineType engineType) throws Exception {
if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (isIdentifier(sqlNode)) {
return buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope, engineType));
return buildAs(
dimension.getName(),
parse(
dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX,
scope,
engineType));
}
throw new Exception("array dimension expr should only identify");
}
return build(dimension, scope, engineType);
}
}

View File

@@ -32,4 +32,4 @@ public class ExtendNode extends SqlInternalOperator {
writer.endList(frameArgs);
writer.endList(frame);
}
}
}

View File

@@ -1,10 +1,11 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import java.util.Set;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import java.util.Set;
public class FilterNode extends SemanticNode {
public static void getFilterField(SqlNode sqlNode, Set<String> fields) {

View File

@@ -2,30 +2,31 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class IdentifyNode extends SemanticNode {
public static SqlNode build(Identify identify, SqlValidatorScope scope, EngineType engineType) throws Exception {
public static SqlNode build(Identify identify, SqlValidatorScope scope, EngineType engineType)
throws Exception {
return parse(identify.getName(), scope, engineType);
}
public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) {
return identifies.stream().filter(i -> type.name().equalsIgnoreCase(i.getType())).map(i -> i.getName())
return identifies.stream()
.filter(i -> type.name().equalsIgnoreCase(i.getType()))
.map(i -> i.getName())
.collect(Collectors.toSet());
}
public static boolean isForeign(String name, List<Identify> identifies) {
Optional<Identify> identify = identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return Identify.Type.FOREIGN.name().equalsIgnoreCase(identify.get().getType());
}
@@ -33,8 +34,8 @@ public class IdentifyNode extends SemanticNode {
}
public static boolean isPrimary(String name, List<Identify> identifies) {
Optional<Identify> identify = identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return Identify.Type.PRIMARY.name().equalsIgnoreCase(identify.get().getType());
}

View File

@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import org.apache.calcite.sql.SqlNode;
@@ -8,26 +7,30 @@ import org.apache.calcite.sql.validate.SqlValidatorScope;
public class MeasureNode extends SemanticNode {
public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope, EngineType engineType)
public static SqlNode buildNonAgg(
String alias, Measure measure, SqlValidatorScope scope, EngineType engineType)
throws Exception {
return buildAs(measure.getName(), getExpr(measure, alias, scope, engineType));
}
public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope, EngineType engineType)
public static SqlNode buildAgg(
Measure measure, boolean noAgg, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
return parse(measure.getName(), scope, engineType);
}
return buildAs(measure.getName(),
return buildAs(
measure.getName(),
AggFunctionNode.build(measure.getAgg(), measure.getName(), scope, engineType));
}
private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope, EngineType enginType)
private static SqlNode getExpr(
Measure measure, String alias, SqlValidatorScope scope, EngineType enginType)
throws Exception {
if (measure.getExpr() == null) {
return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope, enginType);
return parse(
(alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope, enginType);
}
return parse(measure.getExpr(), scope, enginType);
}
}

View File

@@ -1,18 +1,16 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@Data
public class MetricNode extends SemanticNode {
@@ -22,8 +20,10 @@ public class MetricNode extends SemanticNode {
private Map<String, SqlNode> measureFilter = new HashMap<>();
private Map<String, String> aggFunction = new HashMap<>();
public static SqlNode build(Metric metric, SqlValidatorScope scope, EngineType engineType) throws Exception {
if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null
public static SqlNode build(Metric metric, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if (metric.getMetricTypeParams() == null
|| metric.getMetricTypeParams().getExpr() == null
|| metric.getMetricTypeParams().getExpr().isEmpty()) {
return parse(metric.getName(), scope, engineType);
}
@@ -32,13 +32,14 @@ public class MetricNode extends SemanticNode {
}
public static Boolean isMetricField(String name, SemanticSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(name))
.findFirst();
Optional<Metric> metric =
schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name))
.findFirst();
return metric.isPresent() && metric.get().getMetricTypeParams().isFieldMetric();
}
public static Boolean isMetricField(Metric metric) {
return metric.getMetricTypeParams().isFieldMetric();
}
}

View File

@@ -1,12 +1,11 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.translator.calcite.Configuration;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.utils.SqlDialectFactory;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.plan.RelOptPlanner;
@@ -52,15 +51,14 @@ import java.util.function.UnaryOperator;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* model item node
*/
/** model item node */
@Slf4j
public abstract class SemanticNode {
public static Set<SqlKind> AGGREGATION_KIND = new HashSet<>();
public static Set<String> AGGREGATION_FUNC = new HashSet<>();
public static List<String> groupHints = new ArrayList<>(Arrays.asList("1", "2", "3", "4", "5", "6", "7", "8", "9"));
public static List<String> groupHints =
new ArrayList<>(Arrays.asList("1", "2", "3", "4", "5", "6", "7", "8", "9"));
static {
AGGREGATION_KIND.add(SqlKind.AVG);
@@ -76,14 +74,18 @@ public abstract class SemanticNode {
AGGREGATION_FUNC.add("min");
}
public static SqlNode parse(String expression, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlValidatorWithHints sqlValidatorWithHints = Configuration.getSqlValidatorWithHints(
scope.getValidator().getCatalogReader().getRootSchema(), engineType);
if (Configuration.getSqlAdvisor(sqlValidatorWithHints, engineType).getReservedAndKeyWords()
public static SqlNode parse(String expression, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlValidatorWithHints sqlValidatorWithHints =
Configuration.getSqlValidatorWithHints(
scope.getValidator().getCatalogReader().getRootSchema(), engineType);
if (Configuration.getSqlAdvisor(sqlValidatorWithHints, engineType)
.getReservedAndKeyWords()
.contains(expression.toUpperCase())) {
expression = String.format("`%s`", expression);
}
SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig(engineType));
SqlParser sqlParser =
SqlParser.create(expression, Configuration.getParserConfig(engineType));
SqlNode sqlNode = sqlParser.parseExpression();
scope.validateExpr(sqlNode);
return sqlNode;
@@ -92,12 +94,15 @@ public abstract class SemanticNode {
public static SqlNode buildAs(String asName, SqlNode sqlNode) throws Exception {
SqlAsOperator sqlAsOperator = new SqlAsOperator();
SqlIdentifier sqlIdentifier = new SqlIdentifier(asName, SqlParserPos.ZERO);
return new SqlBasicCall(sqlAsOperator, new ArrayList<>(Arrays.asList(sqlNode, sqlIdentifier)),
return new SqlBasicCall(
sqlAsOperator,
new ArrayList<>(Arrays.asList(sqlNode, sqlIdentifier)),
SqlParserPos.ZERO);
}
public static String getSql(SqlNode sqlNode, EngineType engineType) {
UnaryOperator<SqlWriterConfig> sqlWriterConfigUnaryOperator = (c) -> getSqlWriterConfig(engineType);
UnaryOperator<SqlWriterConfig> sqlWriterConfigUnaryOperator =
(c) -> getSqlWriterConfig(engineType);
return sqlNode.toSqlString(sqlWriterConfigUnaryOperator).getSql();
}
@@ -134,7 +139,8 @@ public abstract class SemanticNode {
public static SqlNode getAlias(SqlNode sqlNode, SqlValidatorScope scope) throws Exception {
if (sqlNode instanceof SqlBasicCall) {
SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
if (sqlBasicCall.getKind().equals(SqlKind.AS) && sqlBasicCall.getOperandList().size() > 1) {
if (sqlBasicCall.getKind().equals(SqlKind.AS)
&& sqlBasicCall.getOperandList().size() > 1) {
return sqlBasicCall.getOperandList().get(1);
}
}
@@ -149,9 +155,17 @@ public abstract class SemanticNode {
if (table instanceof SqlSelect) {
SqlSelect tableSelect = (SqlSelect) table;
return tableSelect.getSelectList().stream()
.map(s -> (s instanceof SqlIdentifier) ? ((SqlIdentifier) s).names.get(0)
: (((s instanceof SqlBasicCall) && s.getKind().equals(SqlKind.AS))
? ((SqlBasicCall) s).getOperandList().get(1).toString() : ""))
.map(
s ->
(s instanceof SqlIdentifier)
? ((SqlIdentifier) s).names.get(0)
: (((s instanceof SqlBasicCall)
&& s.getKind().equals(SqlKind.AS))
? ((SqlBasicCall) s)
.getOperandList()
.get(1)
.toString()
: ""))
.collect(Collectors.toSet());
}
return new HashSet<>();
@@ -172,11 +186,17 @@ public abstract class SemanticNode {
private static SqlWriterConfig getSqlWriterConfig(EngineType engineType) {
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
SqlWriterConfig config = SqlPrettyWriter.config().withDialect(sqlDialect)
.withKeywordsLowerCase(false).withClauseEndsLine(true).withAlwaysUseParentheses(false)
.withSelectListItemsOnSeparateLines(false).withUpdateSetListNewline(false).withIndentation(0);
SqlWriterConfig config =
SqlPrettyWriter.config()
.withDialect(sqlDialect)
.withKeywordsLowerCase(false)
.withClauseEndsLine(true)
.withAlwaysUseParentheses(false)
.withSelectListItemsOnSeparateLines(false)
.withUpdateSetListNewline(false)
.withIndentation(0);
if (EngineType.MYSQL.equals(engineType)) {
//no backticks around function name
// no backticks around function name
config = config.withQuoteAllIdentifiers(false);
}
return config;
@@ -191,8 +211,10 @@ public abstract class SemanticNode {
case AS:
SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
if (sqlBasicCall.getOperandList().get(0).getKind().equals(SqlKind.IDENTIFIER)) {
addTableName(sqlBasicCall.getOperandList().get(0).toString(),
sqlBasicCall.getOperandList().get(1).toString(), parseInfo);
addTableName(
sqlBasicCall.getOperandList().get(0).toString(),
sqlBasicCall.getOperandList().get(1).toString(),
parseInfo);
} else {
sqlVisit(sqlBasicCall.getOperandList().get(0), parseInfo);
}
@@ -203,13 +225,17 @@ public abstract class SemanticNode {
sqlVisit(sqlJoin.getRight(), parseInfo);
SqlBasicCall condition = (SqlBasicCall) sqlJoin.getCondition();
if (Objects.nonNull(condition)) {
condition.getOperandList().stream().forEach(c -> addTagField(c.toString(), parseInfo, ""));
condition.getOperandList().stream()
.forEach(c -> addTagField(c.toString(), parseInfo, ""));
}
break;
case UNION:
((SqlBasicCall) sqlNode).getOperandList().forEach(node -> {
sqlVisit(node, parseInfo);
});
((SqlBasicCall) sqlNode)
.getOperandList()
.forEach(
node -> {
sqlVisit(node, parseInfo);
});
break;
case WITH:
SqlWith sqlWith = (SqlWith) sqlNode;
@@ -226,9 +252,12 @@ public abstract class SemanticNode {
}
SqlSelect sqlSelect = (SqlSelect) select;
SqlNodeList selectList = sqlSelect.getSelectList();
selectList.getList().forEach(list -> {
fieldVisit(list, parseInfo, "");
});
selectList
.getList()
.forEach(
list -> {
fieldVisit(list, parseInfo, "");
});
fromVisit(sqlSelect.getFrom(), parseInfo);
if (sqlSelect.hasWhere()) {
whereVisit((SqlBasicCall) sqlSelect.getWhere(), parseInfo);
@@ -238,16 +267,17 @@ public abstract class SemanticNode {
}
SqlNodeList group = sqlSelect.getGroup();
if (group != null) {
group.forEach(groupField -> {
if (groupHints.contains(groupField.toString())) {
int groupIdx = Integer.valueOf(groupField.toString()) - 1;
if (selectList.getList().size() > groupIdx) {
fieldVisit(selectList.get(groupIdx), parseInfo, "");
}
} else {
fieldVisit(groupField, parseInfo, "");
}
});
group.forEach(
groupField -> {
if (groupHints.contains(groupField.toString())) {
int groupIdx = Integer.valueOf(groupField.toString()) - 1;
if (selectList.getList().size() > groupIdx) {
fieldVisit(selectList.get(groupIdx), parseInfo, "");
}
} else {
fieldVisit(groupField, parseInfo, "");
}
});
}
}
@@ -255,7 +285,8 @@ public abstract class SemanticNode {
if (where == null) {
return;
}
if (where.operandCount() == 2 && where.operand(0).getKind().equals(SqlKind.IDENTIFIER)
if (where.operandCount() == 2
&& where.operand(0).getKind().equals(SqlKind.IDENTIFIER)
&& where.operand(1).getKind().equals(SqlKind.LITERAL)) {
fieldVisit(where.operand(0), parseInfo, "");
return;
@@ -263,21 +294,21 @@ public abstract class SemanticNode {
// subquery
if (where.operandCount() == 2
&& (where.operand(0).getKind().equals(SqlKind.IDENTIFIER)
&& (where.operand(1).getKind().equals(SqlKind.SELECT)
|| where.operand(1).getKind().equals(SqlKind.ORDER_BY)))
) {
&& (where.operand(1).getKind().equals(SqlKind.SELECT)
|| where.operand(1).getKind().equals(SqlKind.ORDER_BY)))) {
fieldVisit(where.operand(0), parseInfo, "");
sqlVisit((SqlNode) (where.operand(1)), parseInfo);
return;
}
if (CollectionUtils.isNotEmpty(where.getOperandList()) && where.operand(0).getKind()
.equals(SqlKind.IDENTIFIER)) {
if (CollectionUtils.isNotEmpty(where.getOperandList())
&& where.operand(0).getKind().equals(SqlKind.IDENTIFIER)) {
fieldVisit(where.operand(0), parseInfo, "");
}
if (where.operandCount() >= 2 && where.operand(1).getKind().equals(SqlKind.IDENTIFIER)) {
fieldVisit(where.operand(1), parseInfo, "");
}
if (CollectionUtils.isNotEmpty(where.getOperandList()) && where.operand(0) instanceof SqlBasicCall) {
if (CollectionUtils.isNotEmpty(where.getOperandList())
&& where.operand(0) instanceof SqlBasicCall) {
whereVisit(where.operand(0), parseInfo);
}
if (where.operandCount() >= 2 && where.operand(1) instanceof SqlBasicCall) {
@@ -290,7 +321,7 @@ public abstract class SemanticNode {
return;
}
SqlKind kind = field.getKind();
//System.out.println(kind);
// System.out.println(kind);
// aggfunction
if (AGGREGATION_KIND.contains(kind)) {
SqlOperator sqlCall = ((SqlCall) field).getOperator();
@@ -319,9 +350,12 @@ public abstract class SemanticNode {
}
}
if (field instanceof SqlNodeList) {
((SqlNodeList) field).getList().forEach(node -> {
fieldVisit(node, parseInfo, "");
});
((SqlNodeList) field)
.getList()
.forEach(
node -> {
fieldVisit(node, parseInfo, "");
});
}
}
@@ -330,7 +364,8 @@ public abstract class SemanticNode {
if (!parseInfo.containsKey(Constants.SQL_PARSER_FIELD)) {
parseInfo.put(Constants.SQL_PARSER_FIELD, new HashMap<>());
}
Map<String, Set<String>> fields = (Map<String, Set<String>>) parseInfo.get(Constants.SQL_PARSER_FIELD);
Map<String, Set<String>> fields =
(Map<String, Set<String>>) parseInfo.get(Constants.SQL_PARSER_FIELD);
if (Pattern.matches("(?i)[a-z\\d_\\.]+", exp)) {
if (exp.contains(".")) {
@@ -345,9 +380,7 @@ public abstract class SemanticNode {
}
fields.get("").add(exp);
}
}
}
private static void fromVisit(SqlNode from, Map<String, Object> parseInfo) {
@@ -374,7 +407,8 @@ public abstract class SemanticNode {
sqlVisit(sqlJoin.getRight(), parseInfo);
SqlBasicCall condition = (SqlBasicCall) sqlJoin.getCondition();
if (Objects.nonNull(condition)) {
condition.getOperandList().stream().forEach(c -> addTagField(c.toString(), parseInfo, ""));
condition.getOperandList().stream()
.forEach(c -> addTagField(c.toString(), parseInfo, ""));
}
break;
case SELECT:
@@ -392,12 +426,12 @@ public abstract class SemanticNode {
if (!parseInfo.containsKey(Constants.SQL_PARSER_TABLE)) {
parseInfo.put(Constants.SQL_PARSER_TABLE, new HashMap<>());
}
Map<String, Set<String>> dbTbs = (Map<String, Set<String>>) parseInfo.get(Constants.SQL_PARSER_TABLE);
Map<String, Set<String>> dbTbs =
(Map<String, Set<String>>) parseInfo.get(Constants.SQL_PARSER_TABLE);
if (!dbTbs.containsKey(alias)) {
dbTbs.put(alias, new HashSet<>());
}
dbTbs.get(alias).add(exp);
}
public static Map<String, Object> getDbTable(SqlNode sqlNode) {
@@ -406,21 +440,28 @@ public abstract class SemanticNode {
return parseInfo;
}
public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode,
EngineType engineType) {
public static SqlNode optimize(
SqlValidatorScope scope,
SemanticSchema schema,
SqlNode sqlNode,
EngineType engineType) {
try {
HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
hepProgramBuilder.addRuleInstance(new FilterToGroupScanRule(FilterToGroupScanRule.DEFAULT, schema));
hepProgramBuilder.addRuleInstance(
new FilterToGroupScanRule(FilterToGroupScanRule.DEFAULT, schema));
RelOptPlanner relOptPlanner = new HepPlanner(hepProgramBuilder.build());
RelToSqlConverter converter = new RelToSqlConverter(sqlDialect);
SqlValidator sqlValidator = Configuration.getSqlValidator(
scope.getValidator().getCatalogReader().getRootSchema(), engineType);
SqlToRelConverter sqlToRelConverter = Configuration.getSqlToRelConverter(scope, sqlValidator,
relOptPlanner, engineType);
RelNode sqlRel = sqlToRelConverter.convertQuery(
sqlValidator.validate(sqlNode), false, true).rel;
log.debug("RelNode optimize {}",
SqlValidator sqlValidator =
Configuration.getSqlValidator(
scope.getValidator().getCatalogReader().getRootSchema(), engineType);
SqlToRelConverter sqlToRelConverter =
Configuration.getSqlToRelConverter(
scope, sqlValidator, relOptPlanner, engineType);
RelNode sqlRel =
sqlToRelConverter.convertQuery(sqlValidator.validate(sqlNode), false, true).rel;
log.debug(
"RelNode optimize {}",
SemanticNode.getSql(converter.visitRoot(sqlRel).asStatement(), engineType));
relOptPlanner.setRoot(sqlRel);
RelNode relNode = relOptPlanner.findBestExp();
@@ -470,5 +511,4 @@ public abstract class SemanticNode {
}
return SqlLiteral.createSymbol(JoinType.INNER, SqlParserPos.ZERO);
}
}
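
parse(...) above backtick-quotes the expression whenever it collides with Calcite's reserved words and keywords, because a bare keyword will not parse as an expression. A small sketch of that failure mode, assuming a MySQL-style lex comparable to what Configuration.getParserConfig supplies (the project's exact parser config is not shown here):

import org.apache.calcite.config.Lex;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;

public class ReservedWordSketch {
    public static void main(String[] args) throws Exception {
        SqlParser.Config cfg = SqlParser.config().withLex(Lex.MYSQL);
        // the backtick-quoted form parses as a plain identifier
        SqlNode ok = SqlParser.create("`order`", cfg).parseExpression();
        System.out.println(ok);
        // the bare reserved word is rejected by the parser
        try {
            SqlParser.create("order", cfg).parseExpression();
        } catch (Exception e) {
            System.out.println("bare keyword rejected: " + e.getMessage());
        }
    }
}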

View File

@@ -1,9 +1,6 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node.extend;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.ExtendNode;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
@@ -13,9 +10,11 @@ import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
/**
* extend node to handle lateral explode dataSet
*/
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
/** extend node to handle lateral explode dataSet */
public class LateralViewExplodeNode extends ExtendNode {
public final String sqlNameView = "view";
@@ -61,8 +60,9 @@ public class LateralViewExplodeNode extends ExtendNode {
public void explode(SqlWriter writer, SqlNode sqlNode) {
String delimiter =
Objects.nonNull(delimiterMap) && delimiterMap.containsKey(sqlNode.toString()) ? delimiterMap.get(
sqlNode.toString()) : "";
Objects.nonNull(delimiterMap) && delimiterMap.containsKey(sqlNode.toString())
? delimiterMap.get(sqlNode.toString())
: "";
if (delimiter.isEmpty()) {
writer.sep(sqlNameExplode);
} else {

View File

@@ -1,12 +1,6 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelRule;
import org.apache.calcite.rel.core.Aggregate;
@@ -26,23 +20,39 @@ import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Pair;
import org.apache.commons.lang3.tuple.Triple;
/**
* push down the time filter into group using the RuntimeOptions defined minMaxTime
*
*/
public class FilterToGroupScanRule extends RelRule<Config>
implements TransformationRule {
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
public static FilterTableScanRule.Config DEFAULT = FilterTableScanRule.Config.DEFAULT.withOperandSupplier((b0) -> {
return b0.operand(LogicalFilter.class).oneInput((b1) -> {
return b1.operand(LogicalProject.class).oneInput((b2) -> {
return b2.operand(LogicalAggregate.class).oneInput((b3) -> {
return b3.operand(LogicalProject.class).anyInputs();
});
});
});
/** push down the time filter into group using the RuntimeOptions defined minMaxTime */
public class FilterToGroupScanRule extends RelRule<Config> implements TransformationRule {
}).as(FilterTableScanRule.Config.class);
public static FilterTableScanRule.Config DEFAULT =
FilterTableScanRule.Config.DEFAULT
.withOperandSupplier(
(b0) -> {
return b0.operand(LogicalFilter.class)
.oneInput(
(b1) -> {
return b1.operand(LogicalProject.class)
.oneInput(
(b2) -> {
return b2.operand(
LogicalAggregate
.class)
.oneInput(
(b3) -> {
return b3.operand(
LogicalProject
.class)
.anyInputs();
});
});
});
})
.as(FilterTableScanRule.Config.class);
private SemanticSchema semanticSchema;
@@ -55,9 +65,9 @@ public class FilterToGroupScanRule extends RelRule<Config>
if (call.rels.length != 4) {
return;
}
if (Objects.isNull(semanticSchema.getRuntimeOptions()) || Objects.isNull(
semanticSchema.getRuntimeOptions().getMinMaxTime()) || semanticSchema.getRuntimeOptions()
.getMinMaxTime().getLeft().isEmpty()) {
if (Objects.isNull(semanticSchema.getRuntimeOptions())
|| Objects.isNull(semanticSchema.getRuntimeOptions().getMinMaxTime())
|| semanticSchema.getRuntimeOptions().getMinMaxTime().getLeft().isEmpty()) {
return;
}
Triple<String, String, String> minMax = semanticSchema.getRuntimeOptions().getMinMaxTime();
@@ -65,17 +75,20 @@ public class FilterToGroupScanRule extends RelRule<Config>
Project project0 = (Project) call.rel(1);
Project project1 = (Project) call.rel(3);
Aggregate logicalAggregate = (Aggregate) call.rel(2);
Optional<Pair<RexNode, String>> isIn = project1.getNamedProjects()
.stream().filter(i -> i.right.equalsIgnoreCase(minMax.getLeft())).findFirst();
Optional<Pair<RexNode, String>> isIn =
project1.getNamedProjects().stream()
.filter(i -> i.right.equalsIgnoreCase(minMax.getLeft()))
.findFirst();
if (!isIn.isPresent()) {
return;
}
RelBuilder relBuilder = call.builder();
relBuilder.push(project1);
RexNode addPartitionCondition = getRexNodeByTimeRange(relBuilder, minMax.getLeft(), minMax.getMiddle(),
minMax.getRight());
relBuilder.filter(new RexNode[]{addPartitionCondition});
RexNode addPartitionCondition =
getRexNodeByTimeRange(
relBuilder, minMax.getLeft(), minMax.getMiddle(), minMax.getRight());
relBuilder.filter(new RexNode[] {addPartitionCondition});
relBuilder.project(project1.getProjects());
ImmutableBitSet newGroupSet = logicalAggregate.getGroupSet();
int newGroupCount = newGroupSet.cardinality();
@@ -85,20 +98,30 @@ public class FilterToGroupScanRule extends RelRule<Config>
while (var.hasNext()) {
AggregateCall aggCall = (AggregateCall) var.next();
newAggCalls.add(
aggCall.adaptTo(project1, aggCall.getArgList(), aggCall.filterArg, groupCount, newGroupCount));
aggCall.adaptTo(
project1,
aggCall.getArgList(),
aggCall.filterArg,
groupCount,
newGroupCount));
}
relBuilder.aggregate(relBuilder.groupKey(newGroupSet), newAggCalls);
relBuilder.project(project0.getProjects());
relBuilder.filter(new RexNode[]{filter.getCondition()});
relBuilder.filter(new RexNode[] {filter.getCondition()});
call.transformTo(relBuilder.build());
}
private RexNode getRexNodeByTimeRange(RelBuilder relBuilder, String dateField, String start, String end) {
return relBuilder.call(SqlStdOperatorTable.AND,
relBuilder.call(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, relBuilder.field(dateField),
private RexNode getRexNodeByTimeRange(
RelBuilder relBuilder, String dateField, String start, String end) {
return relBuilder.call(
SqlStdOperatorTable.AND,
relBuilder.call(
SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
relBuilder.field(dateField),
relBuilder.literal(start)),
relBuilder.call(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, relBuilder.field(dateField),
relBuilder.call(
SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
relBuilder.field(dateField),
relBuilder.literal(end)));
}
}
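
getRexNodeByTimeRange(...) above builds AND(dateField >= start, dateField <= end) with RelBuilder, and onMatch(...) pushes that predicate beneath the aggregate before re-applying the original projections and filter. A self-contained sketch of the predicate construction, assuming only Calcite; the values(...) row stands in for the scanned input and sys_imp_date is a hypothetical time column:

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class TimeRangeFilterSketch {
    public static void main(String[] args) {
        RelBuilder b = RelBuilder.create(
                Frameworks.newConfigBuilder()
                        .defaultSchema(Frameworks.createRootSchema(true))
                        .build());
        // one-row stand-in for the bottom LogicalProject/scan
        b.values(new String[] {"sys_imp_date"}, "2024-01-01");
        // AND(dt >= start, dt <= end), as getRexNodeByTimeRange assembles it
        RexNode timeRange = b.call(SqlStdOperatorTable.AND,
                b.call(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                        b.field("sys_imp_date"), b.literal("2024-01-01")),
                b.call(SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
                        b.field("sys_imp_date"), b.literal("2024-01-31")));
        RelNode rel = b.filter(timeRange).build();
        System.out.println(RelOptUtil.toString(rel)); // LogicalFilter over LogicalValues
    }
}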

View File

@@ -1,17 +1,21 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -19,24 +23,23 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
* process query specified filtering information
*/
/** process query specified filtering information */
public class FilterRender extends Renderer {
@Override
public void render(MetricQueryParam metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
public void render(
MetricQueryParam metricCommand,
List<DataSource> dataSources,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg)
throws Exception {
TableView tableView = super.tableView;
SqlNode filterNode = null;
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
@@ -46,8 +49,14 @@ public class FilterRender extends Renderer {
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
for (DataSource dataSource : dataSources) {
SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(), metricCommand.getDimensions(),
dataSource, schema, dimensions, metrics);
SourceRender.whereDimMetric(
fieldWhere,
metricCommand.getMetrics(),
metricCommand.getDimensions(),
dataSource,
schema,
dimensions,
metrics);
}
queryMetrics.addAll(metrics);
queryDimensions.addAll(dimensions);
@@ -62,14 +71,18 @@ public class FilterRender extends Renderer {
continue;
}
if (optionalMetric.isPresent()) {
tableView.getMeasure().add(MetricNode.build(optionalMetric.get(), scope, engineType));
tableView
.getMeasure()
.add(MetricNode.build(optionalMetric.get(), scope, engineType));
} else {
tableView.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
}
}
if (filterNode != null) {
TableView filterView = new TableView();
filterView.setTable(SemanticNode.buildAs(Constants.DATASOURCE_TABLE_FILTER_PREFIX, tableView.build()));
filterView.setTable(
SemanticNode.buildAs(
Constants.DATASOURCE_TABLE_FILTER_PREFIX, tableView.build()));
filterView.getFilter().add(filterNode);
filterView.getMeasure().add(SqlIdentifier.star(SqlParserPos.ZERO));
super.tableView = filterView;

View File

@@ -2,14 +2,14 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode;
@@ -43,17 +43,21 @@ import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
/**
* process the join conditions when the source number is greater than 1
*/
/** process the join conditions when the source number is greater than 1 */
@Slf4j
public class JoinRender extends Renderer {
@Override
public void render(MetricQueryParam metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
public void render(
MetricQueryParam metricCommand,
List<DataSource> dataSources,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg)
throws Exception {
String queryWhere = metricCommand.getWhere();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) {
@@ -78,7 +82,13 @@ public class JoinRender extends Renderer {
final Set<String> filterMetrics = new HashSet<>();
final List<String> queryDimension = new ArrayList<>();
final List<String> queryMetrics = new ArrayList<>();
SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataSource, schema, filterDimensions,
SourceRender.whereDimMetric(
fieldWhere,
queryMetrics,
queryDimension,
dataSource,
schema,
filterDimensions,
filterMetrics);
List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
reqMetric.addAll(filterMetrics);
@@ -88,13 +98,32 @@ public class JoinRender extends Renderer {
reqDimension.addAll(filterDimensions);
reqDimension = uniqList(reqDimension);
Set<String> sourceMeasure = dataSource.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataSource, sourceMeasure, scope, schema,
Set<String> sourceMeasure =
dataSource.getMeasures().stream()
.map(mm -> mm.getName())
.collect(Collectors.toSet());
doMetric(
innerSelect,
filterView,
queryMetrics,
reqMetric,
dataSource,
sourceMeasure,
scope,
schema,
nonAgg);
Set<String> dimension = dataSource.getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataSource, dimension, scope,
Set<String> dimension =
dataSource.getDimensions().stream()
.map(dd -> dd.getName())
.collect(Collectors.toSet());
doDimension(
innerSelect,
filterDimension,
queryDimension,
reqDimension,
dataSource,
dimension,
scope,
schema);
List<String> primary = new ArrayList<>();
for (Identify identify : dataSource.getIdentifiers()) {
@@ -105,8 +134,17 @@ public class JoinRender extends Renderer {
}
List<String> dataSourceWhere = new ArrayList<>(fieldWhere);
addZipperField(dataSource, dataSourceWhere);
TableView tableView = SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension,
metricCommand.getWhere(), dataSources.get(i), scope, schema, true);
TableView tableView =
SourceRender.renderOne(
"",
dataSourceWhere,
queryMetrics,
queryDimension,
metricCommand.getWhere(),
dataSources.get(i),
scope,
schema,
true);
log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
tableView.setAlias(alias);
@@ -127,7 +165,8 @@ public class JoinRender extends Renderer {
innerView.getMeasure().add(entry.getValue());
}
innerView.setTable(left);
filterView.setTable(SemanticNode.buildAs(Constants.JOIN_TABLE_OUT_PREFIX, innerView.build()));
filterView.setTable(
SemanticNode.buildAs(Constants.JOIN_TABLE_OUT_PREFIX, innerView.build()));
if (!filterDimension.isEmpty()) {
for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) {
if (nonAgg) {
@@ -135,83 +174,127 @@ public class JoinRender extends Renderer {
} else {
filterView.getDimension().add(SemanticNode.parse(d, scope, engineType));
}
}
}
super.tableView = filterView;
}
private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView, List<String> queryMetrics,
List<String> reqMetrics, DataSource dataSource, Set<String> sourceMeasure, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
private void doMetric(
Map<String, SqlNode> innerSelect,
TableView filterView,
List<String> queryMetrics,
List<String> reqMetrics,
DataSource dataSource,
Set<String> sourceMeasure,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg)
throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = buildMetricNode(m, dataSource, scope, schema, nonAgg, alias);
MetricNode metricNode =
buildMetricNode(m, dataSource, scope, schema, nonAgg, alias);
if (!metricNode.getNonAggNode().isEmpty()) {
for (String measure : metricNode.getNonAggNode().keySet()) {
innerSelect.put(measure,
SemanticNode.buildAs(measure,
SemanticNode.parse(alias + "." + measure, scope, engineType)));
innerSelect.put(
measure,
SemanticNode.buildAs(
measure,
SemanticNode.parse(
alias + "." + measure, scope, engineType)));
}
}
if (metricNode.getAggFunction() != null && !metricNode.getAggFunction().isEmpty()) {
for (Map.Entry<String, String> entry : metricNode.getAggFunction().entrySet()) {
if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
if (nonAgg) {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
SemanticNode.parse(entry.getKey(), scope, engineType)));
filterView
.getMeasure()
.add(
SemanticNode.buildAs(
entry.getKey(),
SemanticNode.parse(
entry.getKey(),
scope,
engineType)));
} else {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
AggFunctionNode.build(entry.getValue(), entry.getKey(), scope, engineType)));
filterView
.getMeasure()
.add(
SemanticNode.buildAs(
entry.getKey(),
AggFunctionNode.build(
entry.getValue(),
entry.getKey(),
scope,
engineType)));
}
}
}
}
}
}
}
private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension, List<String> queryDimension,
List<String> reqDimensions, DataSource dataSource, Set<String> dimension, SqlValidatorScope scope,
SemanticSchema schema) throws Exception {
private void doDimension(
Map<String, SqlNode> innerSelect,
Set<String> filterDimension,
List<String> queryDimension,
List<String> reqDimensions,
DataSource dataSource,
Set<String> dimension,
SqlValidatorScope scope,
SemanticSchema schema)
throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
innerSelect.put(d,
SemanticNode.buildAs(d,
SemanticNode.parse(alias + "." + identifyDimension[1], scope, engineType)));
innerSelect.put(
d,
SemanticNode.buildAs(
d,
SemanticNode.parse(
alias + "." + identifyDimension[1],
scope,
engineType)));
} else {
innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + d, scope, engineType)));
innerSelect.put(
d,
SemanticNode.buildAs(
d, SemanticNode.parse(alias + "." + d, scope, engineType)));
}
filterDimension.add(d);
}
}
}
private Set<String> getQueryDimension(Set<String> filterDimension, Set<String> queryAllDimension,
Set<String> whereFields) {
return filterDimension.stream().filter(d -> queryAllDimension.contains(d) || whereFields.contains(d)).collect(
Collectors.toSet());
private Set<String> getQueryDimension(
Set<String> filterDimension, Set<String> queryAllDimension, Set<String> whereFields) {
return filterDimension.stream()
.filter(d -> queryAllDimension.contains(d) || whereFields.contains(d))
.collect(Collectors.toSet());
}
private boolean getMatchMetric(SemanticSchema schema, Set<String> sourceMeasure, String m,
List<String> queryMetrics) {
Optional<Metric> metric = schema.getMetrics().stream().filter(mm -> mm.getName().equalsIgnoreCase(m))
.findFirst();
private boolean getMatchMetric(
SemanticSchema schema, Set<String> sourceMeasure, String m, List<String> queryMetrics) {
Optional<Metric> metric =
schema.getMetrics().stream()
.filter(mm -> mm.getName().equalsIgnoreCase(m))
.findFirst();
boolean isAdd = false;
if (metric.isPresent()) {
Set<String> metricMeasures = metric.get().getMetricTypeParams().getMeasures().stream()
.map(me -> me.getName()).collect(Collectors.toSet());
Set<String> metricMeasures =
metric.get().getMetricTypeParams().getMeasures().stream()
.map(me -> me.getName())
.collect(Collectors.toSet());
if (sourceMeasure.containsAll(metricMeasures)) {
isAdd = true;
}
@@ -225,8 +308,12 @@ public class JoinRender extends Renderer {
return isAdd;
}
private boolean getMatchDimension(SemanticSchema schema, Set<String> sourceDimension, DataSource dataSource,
String d, List<String> queryDimension) {
private boolean getMatchDimension(
SemanticSchema schema,
Set<String> sourceDimension,
DataSource dataSource,
String d,
List<String> queryDimension) {
String oriDimension = d;
boolean isAdd = false;
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
@@ -258,11 +345,19 @@ public class JoinRender extends Renderer {
return SemanticNode.getTable(tableView.getTable());
}
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, Map<String, String> before,
DataSource dataSource, SemanticSchema schema, SqlValidatorScope scope)
private SqlNode buildJoin(
SqlNode left,
TableView leftTable,
TableView tableView,
Map<String, String> before,
DataSource dataSource,
SemanticSchema schema,
SqlValidatorScope scope)
throws Exception {
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
SqlNode condition = getCondition(leftTable, tableView, dataSource, schema, scope, engineType);
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
SqlNode condition =
getCondition(leftTable, tableView, dataSource, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema);
SqlNode joinRelationCondition = null;
@@ -272,13 +367,18 @@ public class JoinRender extends Renderer {
condition = joinRelationCondition;
}
if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataSource().getTimePartType())
|| Materialization.TimePartType.ZIPPER.equals(tableView.getDataSource().getTimePartType())) {
SqlNode zipperCondition = getZipperCondition(leftTable, tableView, dataSource, schema, scope);
|| Materialization.TimePartType.ZIPPER.equals(
tableView.getDataSource().getTimePartType())) {
SqlNode zipperCondition =
getZipperCondition(leftTable, tableView, dataSource, schema, scope);
if (Objects.nonNull(joinRelationCondition)) {
condition = new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)),
SqlParserPos.ZERO, null);
condition =
new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<>(
Arrays.asList(zipperCondition, joinRelationCondition)),
SqlParserPos.ZERO,
null);
} else {
condition = zipperCondition;
}
@@ -291,29 +391,38 @@ public class JoinRender extends Renderer {
sqlLiteral,
SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)),
SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO),
condition
);
condition);
}
private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView, SemanticSchema schema) {
private JoinRelation getMatchJoinRelation(
Map<String, String> before, TableView tableView, SemanticSchema schema) {
JoinRelation matchJoinRelation = JoinRelation.builder().build();
if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
for (JoinRelation joinRelation : schema.getJoinRelations()) {
if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataSource().getName())
&& before.containsKey(joinRelation.getLeft())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of(before.get(joinRelation.getLeft()) + "." + r.getLeft(),
r.getMiddle(), tableView.getAlias() + "." + r.getRight())).collect(
Collectors.toList()));
matchJoinRelation.setJoinCondition(
joinRelation.getJoinCondition().stream()
.map(
r ->
Triple.of(
before.get(joinRelation.getLeft())
+ "."
+ r.getLeft(),
r.getMiddle(),
tableView.getAlias()
+ "."
+ r.getRight()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
}
}
}
return matchJoinRelation;
}
private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope, EngineType engineType)
private SqlNode getCondition(
JoinRelation joinRelation, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlNode condition = null;
for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
@@ -321,26 +430,38 @@ public class JoinRender extends Renderer {
ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
if (Objects.isNull(condition)) {
condition = new SqlBasicCall(
SemanticNode.getBinaryOperator(con.getMiddle()),
ons,
SqlParserPos.ZERO, null);
condition =
new SqlBasicCall(
SemanticNode.getBinaryOperator(con.getMiddle()),
ons,
SqlParserPos.ZERO,
null);
continue;
}
SqlNode addCondition = new SqlBasicCall(
SemanticNode.getBinaryOperator(con.getMiddle()),
ons,
SqlParserPos.ZERO, null);
condition = new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)),
SqlParserPos.ZERO, null);
SqlNode addCondition =
new SqlBasicCall(
SemanticNode.getBinaryOperator(con.getMiddle()),
ons,
SqlParserPos.ZERO,
null);
condition =
new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)),
SqlParserPos.ZERO,
null);
}
return condition;
}
private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema,
SqlValidatorScope scope, EngineType engineType) throws Exception {
private SqlNode getCondition(
TableView left,
TableView right,
DataSource dataSource,
SemanticSchema schema,
SqlValidatorScope scope,
EngineType engineType)
throws Exception {
Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
Set<String> selectRight = SemanticNode.getSelect(right.getTable());
@@ -364,25 +485,27 @@ public class JoinRender extends Renderer {
ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
if (condition == null) {
condition = new SqlBasicCall(
SqlStdOperatorTable.EQUALS,
ons,
SqlParserPos.ZERO, null);
condition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
continue;
}
SqlNode addCondition = new SqlBasicCall(
SqlStdOperatorTable.EQUALS,
ons,
SqlParserPos.ZERO, null);
condition = new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)),
SqlParserPos.ZERO, null);
SqlNode addCondition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
condition =
new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)),
SqlParserPos.ZERO,
null);
}
return condition;
}
private static void joinOrder(int cnt, String id, Map<String, Set<String>> next, Queue<String> orders,
private static void joinOrder(
int cnt,
String id,
Map<String, Set<String>> next,
Queue<String> orders,
Map<String, Boolean> visited) {
visited.put(id, true);
orders.add(id);
@@ -404,32 +527,42 @@ public class JoinRender extends Renderer {
private void addZipperField(DataSource dataSource, List<String> fields) {
if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) {
dataSource.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).forEach(t -> {
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)
&& !fields.contains(t.getName())
) {
fields.add(t.getName());
}
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)
&& !fields.contains(t.getName())
) {
fields.add(t.getName());
}
});
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.forEach(
t -> {
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)
&& !fields.contains(t.getName())) {
fields.add(t.getName());
}
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)
&& !fields.contains(t.getName())) {
fields.add(t.getName());
}
});
}
}
private SqlNode getZipperCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema,
SqlValidatorScope scope) throws Exception {
private SqlNode getZipperCondition(
TableView left,
TableView right,
DataSource dataSource,
SemanticSchema schema,
SqlValidatorScope scope)
throws Exception {
if (Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
&& Materialization.TimePartType.ZIPPER.equals(right.getDataSource().getTimePartType())) {
&& Materialization.TimePartType.ZIPPER.equals(
right.getDataSource().getTimePartType())) {
throw new Exception("not support two zipper table");
}
SqlNode condition = null;
Optional<Dimension> leftTime = left.getDataSource().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).findFirst();
Optional<Dimension> rightTime = right.getDataSource().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).findFirst();
Optional<Dimension> leftTime =
left.getDataSource().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst();
Optional<Dimension> rightTime =
right.getDataSource().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst();
if (leftTime.isPresent() && rightTime.isPresent()) {
String startTime = "";
@@ -437,47 +570,92 @@ public class JoinRender extends Renderer {
String dateTime = "";
Optional<Dimension> startTimeOp =
(Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType()) ? left : right)
(Materialization.TimePartType.ZIPPER.equals(
left.getDataSource().getTimePartType())
? left
: right)
.getDataSource().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)).findFirst();
.filter(
d ->
Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(
d.getType()))
.filter(
d ->
d.getName()
.startsWith(
Constants
.MATERIALIZATION_ZIPPER_START))
.findFirst();
Optional<Dimension> endTimeOp =
(Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType()) ? left : right)
(Materialization.TimePartType.ZIPPER.equals(
left.getDataSource().getTimePartType())
? left
: right)
.getDataSource().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)).findFirst();
.filter(
d ->
Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(
d.getType()))
.filter(
d ->
d.getName()
.startsWith(
Constants
.MATERIALIZATION_ZIPPER_END))
.findFirst();
if (startTimeOp.isPresent() && endTimeOp.isPresent()) {
TableView zipper =
Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
? left : right;
Materialization.TimePartType.ZIPPER.equals(
left.getDataSource().getTimePartType())
? left
: right;
TableView partMetric =
Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
? right : left;
Materialization.TimePartType.ZIPPER.equals(
left.getDataSource().getTimePartType())
? right
: left;
Optional<Dimension> partTime =
Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
? rightTime : leftTime;
Materialization.TimePartType.ZIPPER.equals(
left.getDataSource().getTimePartType())
? rightTime
: leftTime;
startTime = zipper.getAlias() + "." + startTimeOp.get().getName();
endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName();
}
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
ArrayList<SqlNode> operandList = new ArrayList<>(
Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType)));
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
ArrayList<SqlNode> operandList =
new ArrayList<>(
Arrays.asList(
SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType)));
condition =
new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<SqlNode>(Arrays.asList(new SqlBasicCall(
SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
new ArrayList<SqlNode>(
Arrays.asList(SemanticNode.parse(startTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))),
SqlParserPos.ZERO, null), new SqlBasicCall(
SqlStdOperatorTable.GREATER_THAN,
operandList,
SqlParserPos.ZERO, null))),
SqlParserPos.ZERO, null);
new ArrayList<SqlNode>(
Arrays.asList(
new SqlBasicCall(
SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
new ArrayList<SqlNode>(
Arrays.asList(
SemanticNode.parse(
startTime,
scope,
engineType),
SemanticNode.parse(
dateTime,
scope,
engineType))),
SqlParserPos.ZERO,
null),
new SqlBasicCall(
SqlStdOperatorTable.GREATER_THAN,
operandList,
SqlParserPos.ZERO,
null))),
SqlParserPos.ZERO,
null);
}
return condition;
}
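
getZipperCondition(...) above joins a zipper (validity-interval) table to a plain partitioned table by requiring the plain table's date to fall inside the zipper row's window: start <= dateTime AND end > dateTime. The deeply nested SqlBasicCall construction reduces to that one predicate, sketched here with Calcite's expression parser; the z/f aliases and column names are hypothetical:

import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;

public class ZipperConditionSketch {
    public static void main(String[] args) throws Exception {
        // zipper row valid over [start_date, end_date); the fact date must fall inside
        SqlNode condition = SqlParser
                .create("z.start_date <= f.imp_date AND z.end_date > f.imp_date")
                .parseExpression();
        System.out.println(condition);
    }
}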

View File

@@ -1,18 +1,14 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
@@ -20,16 +16,23 @@ import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
/**
* process the query result items from query request
*/
import java.util.ArrayList;
import java.util.List;
/** process the query result items from query request */
public class OutputRender extends Renderer {
@Override
public void render(MetricQueryParam metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
public void render(
MetricQueryParam metricCommand,
List<DataSource> dataSources,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg)
throws Exception {
TableView selectDataSet = super.tableView;
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
}
@@ -42,15 +45,20 @@ public class OutputRender extends Renderer {
}
if (metricCommand.getLimit() > 0) {
SqlNode offset = SemanticNode.parse(metricCommand.getLimit().toString(), scope, engineType);
SqlNode offset =
SemanticNode.parse(metricCommand.getLimit().toString(), scope, engineType);
selectDataSet.setOffset(offset);
}
if (!CollectionUtils.isEmpty(metricCommand.getOrder())) {
List<SqlNode> orderList = new ArrayList<>();
for (ColumnOrder columnOrder : metricCommand.getOrder()) {
if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO,
new SqlNode[]{SemanticNode.parse(columnOrder.getCol(), scope, engineType)}));
orderList.add(
SqlStdOperatorTable.DESC.createCall(
SqlParserPos.ZERO,
new SqlNode[] {
SemanticNode.parse(columnOrder.getCol(), scope, engineType)
}));
} else {
orderList.add(SemanticNode.parse(columnOrder.getCol(), scope, engineType));
}
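
For each ColumnOrder whose direction is desc, the loop above wraps the column in Calcite's DESC postfix operator; ascending columns are emitted bare. A minimal sketch of that call shape (imp_date is a hypothetical column):

import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;

public class DescOrderSketch {
    public static void main(String[] args) throws Exception {
        SqlNode col = SqlParser.create("imp_date").parseExpression();
        SqlNode desc = SqlStdOperatorTable.DESC.createCall(
                SqlParserPos.ZERO, new SqlNode[] {col});
        System.out.println(desc); // the column followed by DESC
    }
}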

View File

@@ -1,17 +1,15 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import static com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants.DIMENSION_DELIMITER;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode;
@@ -20,7 +18,12 @@ import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNo
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.util.Litmus;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -33,22 +36,23 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.util.Litmus;
import org.springframework.util.CollectionUtils;
import static com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants.DIMENSION_DELIMITER;
/**
* process the table dataSet from the defined data model schema
*/
/** process the table dataSet from the defined data model schema */
@Slf4j
public class SourceRender extends Renderer {
public static TableView renderOne(String alias, List<String> fieldWheres,
List<String> reqMetrics, List<String> reqDimensions,
String queryWhere, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
public static TableView renderOne(
String alias,
List<String> fieldWheres,
List<String> reqMetrics,
List<String> reqDimensions,
String queryWhere,
DataSource datasource,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg)
throws Exception {
TableView dataSet = new TableView();
TableView output = new TableView();
@@ -59,36 +63,63 @@ public class SourceRender extends Renderer {
if (!fieldWhere.isEmpty()) {
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema, dimensions, metrics);
whereDimMetric(
fieldWhere,
queryMetrics,
queryDimensions,
datasource,
schema,
dimensions,
metrics);
queryMetrics.addAll(metrics);
queryMetrics = uniqList(queryMetrics);
queryDimensions.addAll(dimensions);
queryDimensions = uniqList(queryDimensions);
mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
datasource, scope,
schema, nonAgg);
mergeWhere(
fieldWhere,
dataSet,
output,
queryMetrics,
queryDimensions,
extendFields,
datasource,
scope,
schema,
nonAgg);
}
addTimeDimension(datasource, queryDimensions);
for (String metric : queryMetrics) {
MetricNode metricNode = buildMetricNode(metric, datasource, scope, schema, nonAgg, alias);
MetricNode metricNode =
buildMetricNode(metric, datasource, scope, schema, nonAgg, alias);
if (!metricNode.getAggNode().isEmpty()) {
metricNode.getAggNode().entrySet().stream().forEach(m -> output.getMeasure().add(m.getValue()));
metricNode.getAggNode().entrySet().stream()
.forEach(m -> output.getMeasure().add(m.getValue()));
}
if (metricNode.getNonAggNode() != null) {
metricNode.getNonAggNode().entrySet().stream().forEach(m -> dataSet.getMeasure().add(m.getValue()));
metricNode.getNonAggNode().entrySet().stream()
.forEach(m -> dataSet.getMeasure().add(m.getValue()));
}
if (metricNode.getMeasureFilter() != null) {
metricNode.getMeasureFilter().entrySet().stream().forEach(m -> dataSet.getFilter().add(m.getValue()));
metricNode.getMeasureFilter().entrySet().stream()
.forEach(m -> dataSet.getFilter().add(m.getValue()));
}
}
for (String dimension : queryDimensions) {
if (dimension.contains(Constants.DIMENSION_IDENTIFY) && queryDimensions.contains(
dimension.split(Constants.DIMENSION_IDENTIFY)[1])) {
if (dimension.contains(Constants.DIMENSION_IDENTIFY)
&& queryDimensions.contains(dimension.split(Constants.DIMENSION_IDENTIFY)[1])) {
continue;
}
buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
: dimension, datasource, schema, nonAgg, extendFields, dataSet, output,
buildDimension(
dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
dimension.contains(Constants.DIMENSION_IDENTIFY)
? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
: dimension,
datasource,
schema,
nonAgg,
extendFields,
dataSet,
output,
scope);
}
@@ -97,13 +128,17 @@ public class SourceRender extends Renderer {
SqlNode tableNode = DataSourceNode.buildExtend(datasource, extendFields, scope);
dataSet.setTable(tableNode);
output.setTable(SemanticNode.buildAs(
Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName() + "_" + UUID.randomUUID().toString()
.substring(32), dataSet.build()));
output.setTable(
SemanticNode.buildAs(
Constants.DATASOURCE_TABLE_OUT_PREFIX
+ datasource.getName()
+ "_"
+ UUID.randomUUID().toString().substring(32),
dataSet.build()));
return output;
}
private static List<SqlNode> deduplicateNode(List<SqlNode> listNode) { //deduplicate List<SqlNode>
private static List<SqlNode> deduplicateNode(List<SqlNode> listNode) { // deduplicate List<SqlNode>
List<SqlNode> uniqueElements = new ArrayList<>();
for (SqlNode element : listNode) {
if (!containsElement(uniqueElements, element)) {
@@ -113,7 +148,8 @@ public class SourceRender extends Renderer {
return uniqueElements;
}
private static boolean containsElement(List<SqlNode> list, SqlNode element) { //check whether List<SqlNode> contains a given element
private static boolean containsElement(
List<SqlNode> list, SqlNode element) { // check whether List<SqlNode> contains a given element
for (SqlNode i : list) {
if (i.equalsDeep(element, Litmus.IGNORE)) {
return true;
@@ -122,12 +158,20 @@ public class SourceRender extends Renderer {
return false;
}
private static void buildDimension(String alias, String dimension, DataSource datasource, SemanticSchema schema,
boolean nonAgg, Map<String, String> extendFields, TableView dataSet, TableView output,
private static void buildDimension(
String alias,
String dimension,
DataSource datasource,
SemanticSchema schema,
boolean nonAgg,
Map<String, String> extendFields,
TableView dataSet,
TableView output,
SqlValidatorScope scope)
throws Exception {
List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) {
@@ -145,22 +189,29 @@ public class SourceRender extends Renderer {
if ("".equals(alias)) {
output.getDimension().add(DimensionNode.buildName(dim, scope, engineType));
} else {
output.getDimension().add(DimensionNode.buildNameAs(alias, dim, scope, engineType));
output.getDimension()
.add(DimensionNode.buildNameAs(alias, dim, scope, engineType));
}
isAdd = true;
break;
}
}
if (!isAdd) {
Optional<Identify> identify = datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
Optional<Identify> identify =
datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(dimension))
.findFirst();
if (identify.isPresent()) {
if (nonAgg) {
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
dataSet.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
} else {
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getDimension().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
dataSet.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getDimension()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
}
isAdd = true;
}
@@ -170,35 +221,45 @@ public class SourceRender extends Renderer {
}
Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
if (dimensionOptional.isPresent()) {
dataSet.getMeasure().add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
dataSet.getMeasure()
.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
addExtendFields(dimensionOptional.get(), extendFields);
if (nonAgg) {
output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
output.getMeasure()
.add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
return;
}
output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
output.getDimension()
.add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
}
}
private static void addExtendFields(Dimension dimension, Map<String, String> extendFields) {
if (dimension.getDataType().isArray()) {
if (Objects.nonNull(dimension.getExt()) && dimension.getExt()
.containsKey(DIMENSION_DELIMITER)) {
extendFields.put(dimension.getExpr(),
(String) dimension.getExt().get(DIMENSION_DELIMITER));
if (Objects.nonNull(dimension.getExt())
&& dimension.getExt().containsKey(DIMENSION_DELIMITER)) {
extendFields.put(
dimension.getExpr(), (String) dimension.getExt().get(DIMENSION_DELIMITER));
} else {
extendFields.put(dimension.getExpr(), "");
}
}
}
private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, Map<String, String> extendFields, DataSource datasource,
private static List<SqlNode> getWhereMeasure(
List<String> fields,
List<String> queryMetrics,
List<String> queryDimensions,
Map<String, String> extendFields,
DataSource datasource,
SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
SemanticSchema schema,
boolean nonAgg)
throws Exception {
Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
while (iterator.hasNext()) {
String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
@@ -234,21 +295,39 @@ public class SourceRender extends Renderer {
return whereNode;
}
private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
private static void mergeWhere(
List<String> fields,
TableView dataSet,
TableView outputSet,
List<String> queryMetrics,
List<String> queryDimensions, Map<String, String> extendFields, DataSource datasource,
List<String> queryDimensions,
Map<String, String> extendFields,
DataSource datasource,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg) throws Exception {
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, extendFields, datasource,
scope, schema,
nonAgg);
boolean nonAgg)
throws Exception {
List<SqlNode> whereNode =
getWhereMeasure(
fields,
queryMetrics,
queryDimensions,
extendFields,
datasource,
scope,
schema,
nonAgg);
dataSet.getMeasure().addAll(whereNode);
//getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
// getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
}
public static void whereDimMetric(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, DataSource datasource, SemanticSchema schema, Set<String> dimensions,
public static void whereDimMetric(
List<String> fields,
List<String> queryMetrics,
List<String> queryDimensions,
DataSource datasource,
SemanticSchema schema,
Set<String> dimensions,
Set<String> metrics) {
for (String field : fields) {
if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
@@ -262,42 +341,59 @@ public class SourceRender extends Renderer {
}
}
private static void addField(String field, String oriField, DataSource datasource, SemanticSchema schema,
private static void addField(
String field,
String oriField,
DataSource datasource,
SemanticSchema schema,
Set<String> dimensions,
Set<String> metrics) {
Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
Optional<Dimension> dimension =
datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(field))
.findFirst();
if (dimension.isPresent()) {
dimensions.add(oriField);
return;
}
Optional<Identify> identify = datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(field)).findFirst();
Optional<Identify> identify =
datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(field))
.findFirst();
if (identify.isPresent()) {
dimensions.add(oriField);
return;
}
if (schema.getDimension().containsKey(datasource.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimension().get(datasource.getName()).stream()
.filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
Optional<Dimension> dataSourceDim =
schema.getDimension().get(datasource.getName()).stream()
.filter(d -> d.getName().equalsIgnoreCase(field))
.findFirst();
if (dataSourceDim.isPresent()) {
dimensions.add(oriField);
return;
}
}
Optional<Measure> metric = datasource.getMeasures()
.stream().filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
Optional<Measure> metric =
datasource.getMeasures().stream()
.filter(m -> m.getName().equalsIgnoreCase(field))
.findFirst();
if (metric.isPresent()) {
metrics.add(oriField);
return;
}
Optional<Metric> datasourceMetric = schema.getMetrics()
.stream().filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
Optional<Metric> datasourceMetric =
schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(field))
.findFirst();
if (datasourceMetric.isPresent()) {
Set<String> measures = datasourceMetric.get().getMetricTypeParams().getMeasures().stream()
.map(m -> m.getName()).collect(
Collectors.toSet());
if (datasource.getMeasures().stream().map(m -> m.getName()).collect(Collectors.toSet())
Set<String> measures =
datasourceMetric.get().getMetricTypeParams().getMeasures().stream()
.map(m -> m.getName())
.collect(Collectors.toSet());
if (datasource.getMeasures().stream()
.map(m -> m.getName())
.collect(Collectors.toSet())
.containsAll(measures)) {
metrics.add(oriField);
return;
@@ -306,19 +402,25 @@ public class SourceRender extends Renderer {
}
public static boolean isDimension(String name, DataSource datasource, SemanticSchema schema) {
Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
Optional<Dimension> dimension =
datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name))
.findFirst();
if (dimension.isPresent()) {
return true;
}
Optional<Identify> identify = datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
Optional<Identify> identify =
datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
if (identify.isPresent()) {
return true;
}
if (schema.getDimension().containsKey(datasource.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimension().get(datasource.getName()).stream()
.filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
Optional<Dimension> dataSourceDim =
schema.getDimension().get(datasource.getName()).stream()
.filter(d -> d.getName().equalsIgnoreCase(name))
.findFirst();
if (dataSourceDim.isPresent()) {
return true;
}
@@ -328,12 +430,30 @@ public class SourceRender extends Renderer {
private static void addTimeDimension(DataSource dataSource, List<String> queryDimension) {
if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) {
Optional<Dimension> startTimeOp = dataSource.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)).findFirst();
Optional<Dimension> endTimeOp = dataSource.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)).findFirst();
Optional<Dimension> startTimeOp =
dataSource.getDimensions().stream()
.filter(
d ->
Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(
d.getType()))
.filter(
d ->
d.getName()
.startsWith(
Constants.MATERIALIZATION_ZIPPER_START))
.findFirst();
Optional<Dimension> endTimeOp =
dataSource.getDimensions().stream()
.filter(
d ->
Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(
d.getType()))
.filter(
d ->
d.getName()
.startsWith(
Constants.MATERIALIZATION_ZIPPER_END))
.findFirst();
if (startTimeOp.isPresent() && !queryDimension.contains(startTimeOp.get().getName())) {
queryDimension.add(startTimeOp.get().getName());
}
@@ -341,20 +461,31 @@ public class SourceRender extends Renderer {
queryDimension.add(endTimeOp.get().getName());
}
} else {
Optional<Dimension> timeOp = dataSource.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).findFirst();
Optional<Dimension> timeOp =
dataSource.getDimensions().stream()
.filter(
d ->
Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(
d.getType()))
.findFirst();
if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) {
queryDimension.add(timeOp.get().getName());
}
}
}
public void render(MetricQueryParam metricQueryParam, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
public void render(
MetricQueryParam metricQueryParam,
List<DataSource> dataSources,
SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg)
throws Exception {
String queryWhere = metricQueryParam.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
@@ -362,14 +493,21 @@ public class SourceRender extends Renderer {
}
if (dataSources.size() == 1) {
DataSource dataSource = dataSources.get(0);
super.tableView = renderOne("", fieldWhere, metricQueryParam.getMetrics(),
metricQueryParam.getDimensions(),
metricQueryParam.getWhere(), dataSource, scope, schema, nonAgg);
super.tableView =
renderOne(
"",
fieldWhere,
metricQueryParam.getMetrics(),
metricQueryParam.getDimensions(),
metricQueryParam.getWhere(),
dataSource,
scope,
schema,
nonAgg);
return;
}
JoinRender joinRender = new JoinRender();
joinRender.render(metricQueryParam, dataSources, scope, schema, nonAgg);
super.tableView = joinRender.getTableView();
}
}

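One detail in renderOne above: the output table alias is the datasource name plus the tail of a random UUID. A canonical UUID string is 36 characters, so substring(32) keeps the last four. In isolation:

import java.util.UUID;

public class AliasSketch {
    // e.g. outputAlias("src_", "order") -> "src_order_a1b2" (suffix is random)
    public static String outputAlias(String prefix, String datasourceName) {
        return prefix + datasourceName + "_" + UUID.randomUUID().toString().substring(32);
    }
}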
View File

@@ -13,30 +13,28 @@ import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
/**
* supplement the QueryStatement when querying with a custom aggregation method
*/
/** supplement the QueryStatement when querying with a custom aggregation method */
@Component("CalculateAggConverter")
@Slf4j
public class CalculateAggConverter implements QueryConverter {
public interface EngineSql {
String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql);
}
public DataSetQueryParam generateSqlCommend(QueryStatement queryStatement,
EngineType engineTypeEnum, String version)
public DataSetQueryParam generateSqlCommend(
QueryStatement queryStatement, EngineType engineTypeEnum, String version)
throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
QueryParam queryParam = queryStatement.getQueryParam();
@@ -55,16 +53,24 @@ public class CalculateAggConverter implements QueryConverter {
metricTable.setWhere(where);
metricTable.setAggOption(AggOption.AGGREGATION);
sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
String sql = String.format("select %s from %s %s %s %s", sqlGenerateUtils.getSelect(queryParam),
metricTableName,
sqlGenerateUtils.getGroupBy(queryParam), sqlGenerateUtils.getOrderBy(queryParam),
sqlGenerateUtils.getLimit(queryParam));
String sql =
String.format(
"select %s from %s %s %s %s",
sqlGenerateUtils.getSelect(queryParam),
metricTableName,
sqlGenerateUtils.getGroupBy(queryParam),
sqlGenerateUtils.getOrderBy(queryParam),
sqlGenerateUtils.getLimit(queryParam));
if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
sqlCommand.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s", sqlGenerateUtils.getSelect(queryParam),
metricTableName,
sqlGenerateUtils.getGroupBy(queryParam), sqlGenerateUtils.getOrderBy(queryParam),
sqlGenerateUtils.getLimit(queryParam));
sql =
String.format(
"select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(queryParam),
metricTableName,
sqlGenerateUtils.getGroupBy(queryParam),
sqlGenerateUtils.getOrderBy(queryParam),
sqlGenerateUtils.getLimit(queryParam));
}
sqlCommand.setSql(sql);
return sqlCommand;
@@ -101,27 +107,31 @@ public class CalculateAggConverter implements QueryConverter {
@Override
public void convert(QueryStatement queryStatement) throws Exception {
Database database = queryStatement.getSemanticModel().getDatabase();
DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement,
EngineType.fromString(database.getType().toUpperCase()), database.getVersion());
DataSetQueryParam dataSetQueryParam =
generateSqlCommend(
queryStatement,
EngineType.fromString(database.getType().toUpperCase()),
database.getVersion());
queryStatement.setDataSetQueryParam(dataSetQueryParam);
}
/**
* Ratio
*/
/** Ratio */
public boolean isRatioAccept(QueryParam queryParam) {
Long ratioFuncNum = queryParam.getAggregators().stream()
.filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL) || f.getFunc()
.equals(AggOperatorEnum.RATIO_OVER))).count();
Long ratioFuncNum =
queryParam.getAggregators().stream()
.filter(
f ->
(f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)
|| f.getFunc().equals(AggOperatorEnum.RATIO_OVER)))
.count();
if (ratioFuncNum > 0) {
return true;
}
return false;
}
public DataSetQueryParam generateRatioSqlCommand(QueryStatement queryStatement, EngineType engineTypeEnum,
String version)
public DataSetQueryParam generateRatioSqlCommand(
QueryStatement queryStatement, EngineType engineTypeEnum, String version)
throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
QueryParam queryParam = queryStatement.getQueryParam();
@@ -151,9 +161,21 @@ public class CalculateAggConverter implements QueryConverter {
sqlCommand.setSupportWith(false);
}
if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) {
sql = new MysqlEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(), metricTableName);
sql =
new MysqlEngineSql()
.sql(
queryParam,
isOver,
sqlCommand.isSupportWith(),
metricTableName);
} else {
sql = new CkEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(), metricTableName);
sql =
new CkEngineSql()
.sql(
queryParam,
isOver,
sqlCommand.isSupportWith(),
metricTableName);
}
break;
default:
@@ -165,16 +187,27 @@ public class CalculateAggConverter implements QueryConverter {
public class H2EngineSql implements EngineSql {
public String getOverSelect(QueryParam queryParam, boolean isOver) {
String aggStr = queryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
f.getColumn(), f.getColumn(), f.getColumn(), f.getColumn(),
f.getFunc().getOperator(), f.getColumn());
} else {
return f.getColumn();
}
}).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
String aggStr =
queryParam.getAggregators().stream()
.map(
f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format(
"( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
f.getColumn(),
f.getColumn(),
f.getColumn(),
f.getColumn(),
f.getFunc().getOperator(),
f.getColumn());
} else {
return f.getColumn();
}
})
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups())
? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr;
}
@@ -194,26 +227,48 @@ public class CalculateAggConverter implements QueryConverter {
return "";
}
public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, String aliasRight) {
public String getJoinOn(
QueryParam queryParam, boolean isOver, String aliasLeft, String aliasRight) {
String timeDim = getTimeDim(queryParam);
String timeSpan = getTimeSpan(queryParam, isOver, true);
String aggStr = queryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam.getDateInfo().getPeriod().equals(Constants.MONTH)) {
return String.format(
"%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
aliasRight + timeDim, aliasLeft + timeDim, timeSpan, aliasRight + timeDim);
}
if (queryParam.getDateInfo().getPeriod().equals(Constants.WEEK) && isOver) {
return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
getTimeSpan(queryParam, isOver, false), aliasLeft + timeDim, aliasRight + timeDim);
}
return String.format("%s = TIMESTAMPADD(%s,%s) ",
aliasLeft + timeDim, timeSpan, aliasRight + timeDim);
} else {
return f.getColumn();
}
}).collect(Collectors.joining(" and "));
String aggStr =
queryParam.getAggregators().stream()
.map(
f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam
.getDateInfo()
.getPeriod()
.equals(Constants.MONTH)) {
return String.format(
"%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
aliasRight + timeDim,
aliasLeft + timeDim,
timeSpan,
aliasRight + timeDim);
}
if (queryParam
.getDateInfo()
.getPeriod()
.equals(Constants.WEEK)
&& isOver) {
return String.format(
" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
getTimeSpan(queryParam, isOver, false),
aliasLeft + timeDim,
aliasRight + timeDim);
}
return String.format(
"%s = TIMESTAMPADD(%s,%s) ",
aliasLeft + timeDim,
timeSpan,
aliasRight + timeDim);
} else {
return f.getColumn();
}
})
.collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) {
@@ -221,43 +276,71 @@ public class CalculateAggConverter implements QueryConverter {
}
groups.add(aliasLeft + group + " = " + aliasRight + group);
}
return CollectionUtils.isEmpty(groups) ? aggStr
return CollectionUtils.isEmpty(groups)
? aggStr
: String.join(" and ", groups) + " and " + aggStr + " ";
}
@Override
public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) {
String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."),
getAllJoinSelect(queryParam, "t1."), metricSql, metricSql,
getJoinOn(queryParam, isOver, "t0.", "t1."),
getOrderBy(queryParam), getLimit(queryParam));
String sql =
String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver),
getAllSelect(queryParam, "t0."),
getAllJoinSelect(queryParam, "t1."),
metricSql,
metricSql,
getJoinOn(queryParam, isOver, "t0.", "t1."),
getOrderBy(queryParam),
getLimit(queryParam));
return sql;
}
}
public class CkEngineSql extends MysqlEngineSql {
public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, String aliasRight) {
public String getJoinOn(
QueryParam queryParam, boolean isOver, String aliasLeft, String aliasRight) {
String timeDim = getTimeDim(queryParam);
String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true);
String aggStr = queryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam.getDateInfo().getPeriod().equals(Constants.MONTH)) {
return String.format("toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
}
if (queryParam.getDateInfo().getPeriod().equals(Constants.WEEK) && isOver) {
return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false), aliasRight + timeDim);
}
return String.format("%s = date_add(%s,%s) ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} else {
return f.getColumn();
}
}).collect(Collectors.joining(" and "));
String aggStr =
queryParam.getAggregators().stream()
.map(
f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam
.getDateInfo()
.getPeriod()
.equals(Constants.MONTH)) {
return String.format(
"toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
aliasLeft + timeDim,
aliasRight + timeDim,
timeSpan);
}
if (queryParam
.getDateInfo()
.getPeriod()
.equals(Constants.WEEK)
&& isOver) {
return String.format(
"toMonday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim,
getTimeSpan(queryParam, isOver, false),
aliasRight + timeDim);
}
return String.format(
"%s = date_add(%s,%s) ",
aliasLeft + timeDim,
aliasRight + timeDim,
timeSpan);
} else {
return f.getColumn();
}
})
.collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) {
@@ -265,7 +348,8 @@ public class CalculateAggConverter implements QueryConverter {
}
groups.add(aliasLeft + group + " = " + aliasRight + group);
}
return CollectionUtils.isEmpty(groups) ? aggStr
return CollectionUtils.isEmpty(groups)
? aggStr
: String.join(" and ", groups) + " and " + aggStr + " ";
}
@@ -274,18 +358,26 @@ public class CalculateAggConverter implements QueryConverter {
if (!asWith) {
return String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."),
getAllJoinSelect(queryParam, "t1."), metricSql, metricSql,
getOverSelect(queryParam, isOver),
getAllSelect(queryParam, "t0."),
getAllJoinSelect(queryParam, "t1."),
metricSql,
metricSql,
getJoinOn(queryParam, isOver, "t0.", "t1."),
getOrderBy(queryParam), getLimit(queryParam));
getOrderBy(queryParam),
getLimit(queryParam));
}
return String.format(
",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s "
+ "from t0 left join t1 on %s ) metric_tb_src %s %s ",
metricSql, metricSql, getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."),
metricSql,
metricSql,
getOverSelect(queryParam, isOver),
getAllSelect(queryParam, "t0."),
getAllJoinSelect(queryParam, "t1."),
getJoinOn(queryParam, isOver, "t0.", "t1."),
getOrderBy(queryParam), getLimit(queryParam));
getOrderBy(queryParam),
getLimit(queryParam));
}
}
@@ -308,39 +400,72 @@ public class CalculateAggConverter implements QueryConverter {
}
public String getOverSelect(QueryParam queryParam, boolean isOver) {
String aggStr = queryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format(
"if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
f.getColumn(), f.getColumn(), f.getColumn(), f.getColumn(),
f.getColumn(), f.getFunc().getOperator(), f.getColumn());
} else {
return f.getColumn();
}
}).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
String aggStr =
queryParam.getAggregators().stream()
.map(
f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format(
"if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
f.getColumn(),
f.getColumn(),
f.getColumn(),
f.getColumn(),
f.getColumn(),
f.getFunc().getOperator(),
f.getColumn());
} else {
return f.getColumn();
}
})
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups())
? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr;
}
public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, String aliasRight) {
public String getJoinOn(
QueryParam queryParam, boolean isOver, String aliasLeft, String aliasRight) {
String timeDim = getTimeDim(queryParam);
String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true);
String aggStr = queryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam.getDateInfo().getPeriod().equals(Constants.MONTH)) {
return String.format("%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
}
if (queryParam.getDateInfo().getPeriod().equals(Constants.WEEK) && isOver) {
return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false), aliasRight + timeDim);
}
return String.format("%s = date_add(%s,%s) ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} else {
return f.getColumn();
}
}).collect(Collectors.joining(" and "));
String aggStr =
queryParam.getAggregators().stream()
.map(
f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam
.getDateInfo()
.getPeriod()
.equals(Constants.MONTH)) {
return String.format(
"%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
aliasLeft + timeDim,
aliasRight + timeDim,
timeSpan);
}
if (queryParam
.getDateInfo()
.getPeriod()
.equals(Constants.WEEK)
&& isOver) {
return String.format(
"to_monday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim,
getTimeSpan(queryParam, isOver, false),
aliasRight + timeDim);
}
return String.format(
"%s = date_add(%s,%s) ",
aliasLeft + timeDim,
aliasRight + timeDim,
timeSpan);
} else {
return f.getColumn();
}
})
.collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) {
@@ -348,39 +473,49 @@ public class CalculateAggConverter implements QueryConverter {
}
groups.add(aliasLeft + group + " = " + aliasRight + group);
}
return CollectionUtils.isEmpty(groups) ? aggStr
return CollectionUtils.isEmpty(groups)
? aggStr
: String.join(" and ", groups) + " and " + aggStr + " ";
}
@Override
public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) {
String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."),
getAllJoinSelect(queryParam, "t1."), metricSql, metricSql,
getJoinOn(queryParam, isOver, "t0.", "t1."),
getOrderBy(queryParam), getLimit(queryParam));
String sql =
String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver),
getAllSelect(queryParam, "t0."),
getAllJoinSelect(queryParam, "t1."),
metricSql,
metricSql,
getJoinOn(queryParam, isOver, "t0.", "t1."),
getOrderBy(queryParam),
getLimit(queryParam));
return sql;
}
}
private String getAllJoinSelect(QueryParam queryParam, String alias) {
String aggStr = queryParam.getAggregators().stream()
.map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "")
+ "_roll")
.collect(Collectors.joining(","));
String aggStr =
queryParam.getAggregators().stream()
.map(
f ->
getSelectField(f, alias)
+ " as "
+ getSelectField(f, "")
+ "_roll")
.collect(Collectors.joining(","));
List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) {
groups.add(alias + group + " as " + group + "_roll");
}
return CollectionUtils.isEmpty(groups) ? aggStr
: String.join(",", groups) + "," + aggStr;
return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
}
private String getGroupDimWithOutTime(QueryParam queryParam) {
String timeDim = getTimeDim(queryParam);
return queryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
return queryParam.getGroups().stream()
.filter(f -> !f.equalsIgnoreCase(timeDim))
.collect(Collectors.joining(","));
}
@@ -397,15 +532,19 @@ public class CalculateAggConverter implements QueryConverter {
}
private String getAllSelect(QueryParam queryParam, String alias) {
String aggStr = queryParam.getAggregators().stream().map(f -> getSelectField(f, alias))
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
String aggStr =
queryParam.getAggregators().stream()
.map(f -> getSelectField(f, alias))
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups())
? aggStr
: alias + String.join("," + alias, queryParam.getGroups()) + "," + aggStr;
}
private String getSelectField(final Aggregator agg, String alias) {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
if (agg.getFunc().equals(AggOperatorEnum.RATIO_OVER) || agg.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (agg.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| agg.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return alias + agg.getColumn();
}
return sqlGenerateUtils.getSelectField(agg);
@@ -423,22 +562,27 @@ public class CalculateAggConverter implements QueryConverter {
}
private boolean isOverRatio(QueryParam queryParam) {
Long overCt = queryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
Long overCt =
queryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER))
.count();
return overCt > 0;
}
private void check(QueryParam queryParam) throws Exception {
Long ratioOverNum = queryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
Long ratioRollNum = queryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count();
Long ratioOverNum =
queryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER))
.count();
Long ratioRollNum =
queryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL))
.count();
if (ratioOverNum > 0 && ratioRollNum > 0) {
throw new Exception("not support over ratio and roll ratio together ");
}
if (getTimeDim(queryParam).isEmpty()) {
throw new Exception("miss time filter");
}
}
}

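The engine-specific sql() implementations above all emit the same self-join shape: the metric table joined to itself shifted by one time period, with the ratio computed from the joined columns. Roughly what the MySQL variant renders for a day-over-day roll ratio, hand-assembled here with illustrative table and column names:

public class RatioSqlSketch {
    // sketch of the generated statement, not verbatim converter output
    static final String EXAMPLE =
            "select sys_imp_date, if(pv_roll!=0, (pv-pv_roll)/pv_roll , 0) as pv_ratio_roll, pv"
                    + " from ( select t0.sys_imp_date, t0.pv,"
                    + " t1.sys_imp_date as sys_imp_date_roll, t1.pv as pv_roll"
                    + " from metric_tb t0 left join metric_tb t1"
                    + " on t0.sys_imp_date = date_add(t1.sys_imp_date, INTERVAL 1 DAY)"
                    + " ) metric_tb_src";
}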
View File

@@ -1,12 +1,12 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.StringValue;
@@ -16,6 +16,7 @@ import net.sf.jsqlparser.schema.Column;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
@@ -33,21 +34,23 @@ public class DefaultDimValueConverter implements QueryConverter {
@Override
public void convert(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getSemanticModel().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList());
List<Dimension> dimensions =
queryStatement.getSemanticModel().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) {
return;
}
String sql = queryStatement.getDataSetQueryParam().getSql();
List<String> whereFields = SqlSelectHelper.getWhereFields(sql)
.stream().filter(field -> !TimeDimensionEnum.containsTimeDimension(field))
.collect(Collectors.toList());
List<String> whereFields =
SqlSelectHelper.getWhereFields(sql).stream()
.filter(field -> !TimeDimensionEnum.containsTimeDimension(field))
.collect(Collectors.toList());
if (!CollectionUtils.isEmpty(whereFields)) {
return;
}
MetricTable metricTable = queryStatement.getDataSetQueryParam()
.getTables().stream().findFirst().orElse(null);
MetricTable metricTable =
queryStatement.getDataSetQueryParam().getTables().stream().findFirst().orElse(null);
List<Expression> expressions = Lists.newArrayList();
for (Dimension dimension : dimensions) {
ExpressionList expressionList = new ExpressionList();
@@ -65,5 +68,4 @@ public class DefaultDimValueConverter implements QueryConverter {
sql = SqlAddHelper.addWhere(sql, expressions);
queryStatement.getDataSetQueryParam().setSql(sql);
}
}
}

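The converter above appends one IN filter per dimension that declares default values, and only when the query carries no non-time WHERE fields of its own. A minimal sketch of building the equivalent condition with jsqlparser's parser (dimension name and values are illustrative):

import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;

public class DefaultDimValueSketch {
    // same kind of expression the ExpressionList/InExpression code above constructs
    public static Expression defaultFilter() throws Exception {
        return CCJSqlParserUtil.parseCondExpression("channel in ('app','web')");
    }
}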
View File

@@ -3,21 +3,20 @@ package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
/**
* QueryConverter default implementation
*/
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
/** QueryConverter default implementation */
@Component("ParserDefaultConverter")
@Slf4j
public class ParserDefaultConverter implements QueryConverter {
@@ -27,7 +26,8 @@ public class ParserDefaultConverter implements QueryConverter {
if (Objects.isNull(queryStatement.getQueryParam()) || queryStatement.getIsS2SQL()) {
return false;
}
CalculateAggConverter calculateConverterAgg = ContextUtils.getBean(CalculateAggConverter.class);
CalculateAggConverter calculateConverterAgg =
ContextUtils.getBean(CalculateAggConverter.class);
return !calculateConverterAgg.accept(queryStatement);
}
@@ -36,12 +36,14 @@ public class ParserDefaultConverter implements QueryConverter {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
QueryParam queryParam = queryStatement.getQueryParam();
MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam();
MetricQueryParam metricReq = generateSqlCommand(queryStatement.getQueryParam(), queryStatement);
MetricQueryParam metricReq =
generateSqlCommand(queryStatement.getQueryParam(), queryStatement);
queryStatement.setMinMaxTime(sqlGenerateUtils.getBeginEndTime(queryParam, null));
BeanUtils.copyProperties(metricReq, metricQueryParam);
}
public MetricQueryParam generateSqlCommand(QueryParam queryParam, QueryStatement queryStatement) {
public MetricQueryParam generateSqlCommand(
QueryParam queryParam, QueryStatement queryStatement) {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
MetricQueryParam metricQueryParam = new MetricQueryParam();
metricQueryParam.setMetrics(queryParam.getMetrics());
@@ -50,21 +52,24 @@ public class ParserDefaultConverter implements QueryConverter {
log.info("in generateSqlCommend, complete where:{}", where);
metricQueryParam.setWhere(where);
metricQueryParam.setOrder(queryParam.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection())).collect(Collectors.toList()));
metricQueryParam.setOrder(
queryParam.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
metricQueryParam.setLimit(queryParam.getLimit());
// support detail query
if (queryParam.getQueryType().isNativeAggQuery() && CollectionUtils.isEmpty(metricQueryParam.getMetrics())) {
if (queryParam.getQueryType().isNativeAggQuery()
&& CollectionUtils.isEmpty(metricQueryParam.getMetrics())) {
Map<Long, DataSource> modelMap = queryStatement.getSemanticModel().getModelMap();
for (Long modelId : modelMap.keySet()) {
String modelBizName = modelMap.get(modelId).getName();
String internalMetricName = sqlGenerateUtils.generateInternalMetricName(modelBizName);
String internalMetricName =
sqlGenerateUtils.generateInternalMetricName(modelBizName);
metricQueryParam.getMetrics().add(internalMetricName);
}
}
return metricQueryParam;
}
}

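For detail (native, non-aggregated) queries that carry no metrics, the converter above injects one internal metric per model. A sketch of that fallback; the naming rule below is a stand-in for generateInternalMetricName, whose actual format is not shown here:

import java.util.List;
import java.util.Map;

public class InternalMetricSketch {
    public static void fillInternalMetrics(Map<Long, String> modelBizNames, List<String> metrics) {
        if (!metrics.isEmpty()) {
            return;
        }
        for (String bizName : modelBizNames.values()) {
            metrics.add(bizName + "_internal_cnt"); // hypothetical naming rule
        }
    }
}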
View File

@@ -2,13 +2,10 @@ package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* to supplement, translate the request Body
*/
/** to supplement, translate the request Body */
public interface QueryConverter {
boolean accept(QueryStatement queryStatement);
void convert(QueryStatement queryStatement) throws Exception;
}

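Every converter in the translation chain implements this two-method contract: accept() decides whether the converter applies to a statement, convert() mutates it. A no-op skeleton, assuming only the interface shown above:

import com.tencent.supersonic.headless.core.pojo.QueryStatement;

public class NoopConverter implements QueryConverter {
    @Override
    public boolean accept(QueryStatement queryStatement) {
        return queryStatement != null;
    }

    @Override
    public void convert(QueryStatement queryStatement) {
        // intentionally leaves the statement unchanged
    }
}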
View File

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -33,17 +33,21 @@ public class SqlVariableParseConverter implements QueryConverter {
return;
}
for (ModelResp modelResp : modelResps) {
if (ModelDefineType.SQL_QUERY.getName()
if (ModelDefineType.SQL_QUERY
.getName()
.equalsIgnoreCase(modelResp.getModelDetail().getQueryType())) {
String sqlParsed = SqlVariableParseUtils.parse(
modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(),
queryStatement.getQueryParam().getParams()
);
DataSource dataSource = queryStatement.getSemanticModel()
.getDatasourceMap().get(modelResp.getBizName());
String sqlParsed =
SqlVariableParseUtils.parse(
modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(),
queryStatement.getQueryParam().getParams());
DataSource dataSource =
queryStatement
.getSemanticModel()
.getDatasourceMap()
.get(modelResp.getBizName());
dataSource.setSqlQuery(sqlParsed);
}
}
}
}
}

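SqlVariableParseUtils.parse above substitutes declared variables into a SQL_QUERY model's SQL before it is installed as the datasource query. A naive stand-in that conveys the idea; the $name$ delimiter is an assumption for illustration, not the project's actual variable syntax:

import java.util.Map;

public class VariableParseSketch {
    public static String parse(String sql, Map<String, String> params) {
        String out = sql;
        for (Map.Entry<String, String> e : params.entrySet()) {
            out = out.replace("$" + e.getKey() + "$", e.getValue()); // hypothetical $name$ syntax
        }
        return out;
    }
}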
View File

@@ -2,23 +2,21 @@ package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.executor.QueryAccelerator;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.translator.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.QueryParser;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.translator.QueryOptimizer;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.support.SpringFactoriesLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.support.SpringFactoriesLoader;
/**
* QueryConverter QueryOptimizer QueryExecutor object factory
*/
/** QueryConverter QueryOptimizer QueryExecutor object factory */
@Slf4j
public class ComponentFactory {
@@ -88,17 +86,20 @@ public class ComponentFactory {
List<QueryOptimizer> queryOptimizerList = new ArrayList<>();
init(QueryOptimizer.class, queryOptimizerList);
if (!queryOptimizerList.isEmpty()) {
queryOptimizerList.stream().forEach(q -> addQueryOptimizer(q.getClass().getSimpleName(), q));
queryOptimizerList.stream()
.forEach(q -> addQueryOptimizer(q.getClass().getSimpleName(), q));
}
}
private static void initQueryExecutors() {
//queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor", JdbcExecutor.class));
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryExecutor.class, queryExecutors);
}
private static void initQueryAccelerators() {
//queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor", JdbcExecutor.class));
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryAccelerator.class, queryAccelerators);
}
@@ -119,14 +120,15 @@ public class ComponentFactory {
}
private static <T> List<T> init(Class<T> factoryType, List list) {
list.addAll(SpringFactoriesLoader.loadFactories(factoryType,
Thread.currentThread().getContextClassLoader()));
list.addAll(
SpringFactoriesLoader.loadFactories(
factoryType, Thread.currentThread().getContextClassLoader()));
return list;
}
private static <T> T init(Class<T> factoryType) {
return SpringFactoriesLoader.loadFactories(factoryType,
Thread.currentThread().getContextClassLoader()).get(0);
return SpringFactoriesLoader.loadFactories(
factoryType, Thread.currentThread().getContextClassLoader())
.get(0);
}
}

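ComponentFactory discovers converters, optimizers, executors, and accelerators through Spring's factories mechanism: implementations are listed in META-INF/spring.factories under the interface's fully qualified name, and loadFactories instantiates them all. The loading step in isolation:

import java.util.List;
import org.springframework.core.io.support.SpringFactoriesLoader;

public class FactoryLoadingSketch {
    // returns an instance of every implementation registered for factoryType
    public static <T> List<T> load(Class<T> factoryType) {
        return SpringFactoriesLoader.loadFactories(
                factoryType, Thread.currentThread().getContextClassLoader());
    }
}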
View File

@@ -5,19 +5,21 @@ import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* transform query results to return to the users
*/
/** transform query results to return to the users */
public class DataTransformUtils {
public static List<Map<String, Object>> transform(List<Map<String, Object>> originalData, String metric,
List<String> groups, DateConf dateConf) {
public static List<Map<String, Object>> transform(
List<Map<String, Object>> originalData,
String metric,
List<String> groups,
DateConf dateConf) {
List<String> dateList = dateConf.getDateList();
List<Map<String, Object>> transposedData = new ArrayList<>();
for (Map<String, Object> originalRow : originalData) {
@@ -27,12 +29,14 @@ public class DataTransformUtils {
transposedRow.put(key, originalRow.get(key));
}
}
transposedRow.put(String.valueOf(originalRow.get(getTimeDimension(dateConf))),
transposedRow.put(
String.valueOf(originalRow.get(getTimeDimension(dateConf))),
originalRow.get(metric));
transposedData.add(transposedRow);
}
Map<String, List<Map<String, Object>>> dataMerge = transposedData.stream()
.collect(Collectors.groupingBy(row -> getRowKey(row, groups)));
Map<String, List<Map<String, Object>>> dataMerge =
transposedData.stream()
.collect(Collectors.groupingBy(row -> getRowKey(row, groups)));
List<Map<String, Object>> resultData = Lists.newArrayList();
for (List<Map<String, Object>> data : dataMerge.values()) {
Map<String, Object> rowData = new HashMap<>();
@@ -70,5 +74,4 @@ public class DataTransformUtils {
return TimeDimensionEnum.DAY.getName();
}
}
}

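transform above pivots a (group, date, metric) row set so each date becomes its own column, then merges rows sharing the same group key. The per-row step reduces to the following (the sys_imp_date/pv field names are illustrative):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TransposeSketch {
    // {city=bj, sys_imp_date=2024-01-01, pv=10} -> {city=bj, 2024-01-01=10}
    public static Map<String, Object> pivotRow(Map<String, Object> row, List<String> groups) {
        Map<String, Object> out = new HashMap<>();
        for (String group : groups) {
            out.put(group, row.get(group));
        }
        out.put(String.valueOf(row.get("sys_imp_date")), row.get("pv"));
        return out;
    }
}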
View File

@@ -1,5 +1,7 @@
package com.tencent.supersonic.headless.core.utils;
import javax.sql.DataSource;
import com.alibaba.druid.util.StringUtils;
import com.tencent.supersonic.common.util.MD5Util;
import com.tencent.supersonic.headless.api.pojo.enums.DataType;
@@ -7,7 +9,7 @@ import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.JdbcDataSource;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
@@ -15,6 +17,7 @@ import java.sql.SQLException;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import static com.tencent.supersonic.common.pojo.Constants.AT_SYMBOL;
import static com.tencent.supersonic.common.pojo.Constants.COLON;
import static com.tencent.supersonic.common.pojo.Constants.DOUBLE_SLASH;
@@ -24,14 +27,11 @@ import static com.tencent.supersonic.common.pojo.Constants.NEW_LINE_CHAR;
import static com.tencent.supersonic.common.pojo.Constants.PATTERN_JDBC_TYPE;
import static com.tencent.supersonic.common.pojo.Constants.SPACE;
/**
* tool functions for JDBC
*/
/** tool functions for JDBC */
@Slf4j
public class JdbcDataSourceUtils {
@Getter
private static Set releaseSourceSet = new HashSet();
@Getter private static Set releaseSourceSet = new HashSet();
private JdbcDataSource jdbcDataSource;
public JdbcDataSourceUtils(JdbcDataSource jdbcDataSource) {
@@ -46,8 +46,9 @@ public class JdbcDataSourceUtils {
log.error(e.toString(), e);
return false;
}
try (Connection con = DriverManager.getConnection(database.getUrl(),
database.getUsername(), database.passwordDecrypt())) {
try (Connection con =
DriverManager.getConnection(
database.getUrl(), database.getUsername(), database.passwordDecrypt())) {
return con != null;
} catch (SQLException e) {
log.error(e.toString(), e);
@@ -115,7 +116,8 @@ public class JdbcDataSourceUtils {
log.error("e", e);
}
if (!StringUtils.isEmpty(className) && !className.contains("com.sun.proxy")
if (!StringUtils.isEmpty(className)
&& !className.contains("com.sun.proxy")
&& !className.contains("net.sf.cglib.proxy")) {
return className;
}
@@ -127,7 +129,12 @@ public class JdbcDataSourceUtils {
throw new RuntimeException("Not supported data type: jdbcUrl=" + jdbcUrl);
}
public static String getKey(String name, String jdbcUrl, String username, String password, String version,
public static String getKey(
String name,
String jdbcUrl,
String username,
String password,
String version,
boolean isExt) {
StringBuilder sb = new StringBuilder();
@@ -158,8 +165,10 @@ public class JdbcDataSourceUtils {
return dataSource.getConnection();
} catch (Exception e) {
log.error("Get connection error, jdbcUrl:{}, e:{}", database.getUrl(), e);
throw new RuntimeException("Get connection error, jdbcUrl:" + database.getUrl()
+ " you can try again later or reset datasource");
throw new RuntimeException(
"Get connection error, jdbcUrl:"
+ database.getUrl()
+ " you can try again later or reset datasource");
}
}
return conn;

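The connection probe above leans on try-with-resources so the test connection is always closed. Stripped of project types, it reduces to:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class ConnectionTestSketch {
    public static boolean canConnect(String url, String user, String password) {
        try (Connection con = DriverManager.getConnection(url, user, password)) {
            return con != null;
        } catch (SQLException e) {
            return false;
        }
    }
}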
View File

@@ -1,60 +1,64 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.core.pojo.DuckDbSource;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* tool functions for DuckDB queries
*/
/** tool functions for DuckDB queries */
public class JdbcDuckDbUtils {
public static void attachMysql(DuckDbSource duckDbSource, String host, Integer port, String user,
public static void attachMysql(
DuckDbSource duckDbSource,
String host,
Integer port,
String user,
String password,
String database)
throws Exception {
try {
duckDbSource.execute("INSTALL mysql");
duckDbSource.execute("load mysql");
String attachSql = "ATTACH 'host=%s port=%s user=%s password=%s database=%s' AS mysqldb (TYPE mysql);";
duckDbSource.execute(String.format(attachSql,
host,
port,
user,
password,
database
));
String attachSql =
"ATTACH 'host=%s port=%s user=%s password=%s database=%s' AS mysqldb (TYPE mysql);";
duckDbSource.execute(String.format(attachSql, host, port, user, password, database));
duckDbSource.execute("SET mysql_experimental_filter_pushdown = true;");
} catch (Exception e) {
throw e;
}
}
public static List<String> getParquetColumns(DuckDbSource duckDbSource, String parquetPath) throws Exception {
public static List<String> getParquetColumns(DuckDbSource duckDbSource, String parquetPath)
throws Exception {
SemanticQueryResp queryResultWithColumns = new SemanticQueryResp();
duckDbSource.query(String.format("SELECT distinct name FROM parquet_schema('%s')", parquetPath),
duckDbSource.query(
String.format("SELECT distinct name FROM parquet_schema('%s')", parquetPath),
queryResultWithColumns);
if (!queryResultWithColumns.getResultList().isEmpty()) {
return queryResultWithColumns.getResultList().stream()
.filter(l -> l.containsKey("name") && Objects.nonNull(l.get("name")))
.map(l -> (String) l.get("name")).collect(Collectors.toList());
.map(l -> (String) l.get("name"))
.collect(Collectors.toList());
}
return new ArrayList<>();
}
public static List<String> getParquetPartition(DuckDbSource duckDbSource, String parquetPath, String partitionName)
throws Exception {
public static List<String> getParquetPartition(
DuckDbSource duckDbSource, String parquetPath, String partitionName) throws Exception {
SemanticQueryResp queryResultWithColumns = new SemanticQueryResp();
duckDbSource.query(String.format("SELECT distinct %s as partition FROM read_parquet('%s')", partitionName,
parquetPath), queryResultWithColumns);
duckDbSource.query(
String.format(
"SELECT distinct %s as partition FROM read_parquet('%s')",
partitionName, parquetPath),
queryResultWithColumns);
if (!queryResultWithColumns.getResultList().isEmpty()) {
return queryResultWithColumns.getResultList().stream()
.filter(l -> l.containsKey("partition") && Objects.nonNull(l.get("partition")))
.map(l -> (String) l.get("partition")).collect(Collectors.toList());
.map(l -> (String) l.get("partition"))
.collect(Collectors.toList());
}
return new ArrayList<>();
}
@@ -64,9 +68,9 @@ public class JdbcDuckDbUtils {
return true;
}
public static boolean createView(DuckDbSource duckDbSource, String view, String sql) throws Exception {
public static boolean createView(DuckDbSource duckDbSource, String view, String sql)
throws Exception {
duckDbSource.execute(String.format("CREATE OR REPLACE VIEW %s AS %s;", view, sql));
return true;
}
}
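For context, these utilities issue plain SQL against DuckDB; assuming DuckDbSource wraps a JDBC connection, the same statements can be run directly through the DuckDB JDBC driver. A minimal sketch (assumptions: org.duckdb:duckdb_jdbc on the classpath; the MySQL host and credentials are placeholders, so that part is left commented out):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DuckDbSketch {
    public static void main(String[] args) throws Exception {
        // In-memory DuckDB database, as with "jdbc:duckdb:".
        try (Connection conn = DriverManager.getConnection("jdbc:duckdb:");
                Statement stmt = conn.createStatement()) {
            // createView equivalent: a view over an inline SELECT.
            stmt.execute("CREATE OR REPLACE VIEW v_demo AS SELECT 1 AS pv, 'home' AS page");
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM v_demo")) {
                while (rs.next()) {
                    System.out.println(rs.getInt("pv") + " " + rs.getString("page"));
                }
            }
            // attachMysql equivalent (needs network access and a reachable
            // MySQL server; host/credentials below are placeholders):
            // stmt.execute("INSTALL mysql");
            // stmt.execute("LOAD mysql");
            // stmt.execute("ATTACH 'host=localhost port=3306 user=root password=pw"
            //         + " database=demo' AS mysqldb (TYPE mysql)");
            // stmt.execute("SET mysql_experimental_filter_pushdown = true;");
        }
    }
}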

View File

@@ -9,9 +9,7 @@ import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Schema Match Helper
*/
/** Schema Match Helper */
@Slf4j
public class SchemaMatchHelper {
@@ -20,22 +18,25 @@ public class SchemaMatchHelper {
return;
}
Set<String> metricDimensionDetectWordSet = matches.stream()
.filter(SchemaMatchHelper::isMetricOrDimension)
.map(SchemaElementMatch::getDetectWord).collect(Collectors.toSet());
Set<String> metricDimensionDetectWordSet =
matches.stream()
.filter(SchemaMatchHelper::isMetricOrDimension)
.map(SchemaElementMatch::getDetectWord)
.collect(Collectors.toSet());
matches.removeIf(elementMatch -> {
if (!isMetricOrDimension(elementMatch)) {
return false;
}
for (String detectWord : metricDimensionDetectWordSet) {
if (detectWord.startsWith(elementMatch.getDetectWord())
&& detectWord.length() > elementMatch.getDetectWord().length()) {
return true;
}
}
return false;
});
matches.removeIf(
elementMatch -> {
if (!isMetricOrDimension(elementMatch)) {
return false;
}
for (String detectWord : metricDimensionDetectWordSet) {
if (detectWord.startsWith(elementMatch.getDetectWord())
&& detectWord.length() > elementMatch.getDetectWord().length()) {
return true;
}
}
return false;
});
}
private static boolean isMetricOrDimension(SchemaElementMatch elementMatch) {
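The removeIf above keeps only the most specific matches: any metric or dimension match whose detect word is a strict prefix of a longer detect word is dropped. A self-contained sketch over plain strings (SchemaElementMatch is reduced to its detect word here, an assumption for illustration):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class PrefixFilterSketch {
    // Drop any detect word that is a strict prefix of another, longer one,
    // mirroring SchemaMatchHelper's removeIf logic.
    static void removePrefixMatches(List<String> detectWords) {
        Set<String> all = new HashSet<>(detectWords);
        detectWords.removeIf(word ->
                all.stream().anyMatch(other ->
                        other.startsWith(word) && other.length() > word.length()));
    }

    public static void main(String[] args) {
        List<String> words = new ArrayList<>(List.of("pv", "pv_avg", "stay", "stay_hours"));
        removePrefixMatches(words);
        System.out.println(words); // [pv_avg, stay_hours]
    }
}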

View File

@@ -2,32 +2,34 @@ package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.headless.api.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSqlDialect;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlDialect.Context;
import org.apache.calcite.sql.SqlDialect.DatabaseProduct;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
public class SqlDialectFactory {
public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withIdentifierQuoteString("`")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final Context POSTGRESQL_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final Context DEFAULT_CONTEXT =
SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withIdentifierQuoteString("`")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final Context POSTGRESQL_CONTEXT =
SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
private static Map<EngineType, SemanticSqlDialect> sqlDialectMap;
static {
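As a side note, the contexts above use Calcite's standard builder API; constructing a dialect from DEFAULT_CONTEXT makes identifiers backtick-quoted. A minimal sketch (assuming calcite-core on the classpath; SemanticSqlDialect from this file is replaced by the base SqlDialect for illustration):

import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlDialect.DatabaseProduct;

public class DialectSketch {
    public static void main(String[] args) {
        SqlDialect.Context context = SqlDialect.EMPTY_CONTEXT
                .withDatabaseProduct(DatabaseProduct.BIG_QUERY)
                .withLiteralQuoteString("'")
                .withLiteralEscapedQuoteString("''")
                .withIdentifierQuoteString("`")
                .withUnquotedCasing(Casing.UNCHANGED)
                .withQuotedCasing(Casing.UNCHANGED)
                .withCaseSensitive(false);
        SqlDialect dialect = new SqlDialect(context);
        // Identifiers get backtick-quoted, matching DEFAULT_CONTEXT above.
        System.out.println(dialect.quoteIdentifier("imp_date")); // `imp_date`
    }
}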

View File

@@ -46,11 +46,7 @@ import static com.tencent.supersonic.common.pojo.Constants.MONTH;
import static com.tencent.supersonic.common.pojo.Constants.UNDERLINE;
import static com.tencent.supersonic.common.pojo.Constants.WEEK;
/**
 * Utility functions for analyzing QueryStructReq.
 */
/** Utility functions for analyzing QueryStructReq. */
@Component
@Slf4j
public class SqlGenerateUtils {
@@ -61,8 +57,10 @@ public class SqlGenerateUtils {
private final ExecutorConfig executorConfig;
public SqlGenerateUtils(SqlFilterUtils sqlFilterUtils,
DateModeUtils dateModeUtils, ExecutorConfig executorConfig) {
public SqlGenerateUtils(
SqlFilterUtils sqlFilterUtils,
DateModeUtils dateModeUtils,
ExecutorConfig executorConfig) {
this.sqlFilterUtils = sqlFilterUtils;
this.dateModeUtils = dateModeUtils;
this.executorConfig = executorConfig;
@@ -100,16 +98,22 @@ public class SqlGenerateUtils {
}
public String getSelect(QueryParam queryParam) {
String aggStr = queryParam.getAggregators().stream().map(this::getSelectField)
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
String aggStr =
queryParam.getAggregators().stream()
.map(this::getSelectField)
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups())
? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr;
}
public String getSelect(QueryParam queryParam, Map<String, String> deriveMetrics) {
String aggStr = queryParam.getAggregators().stream().map(a -> getSelectField(a, deriveMetrics))
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
String aggStr =
queryParam.getAggregators().stream()
.map(a -> getSelectField(a, deriveMetrics))
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups())
? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr;
}
@@ -120,9 +124,20 @@ public class SqlGenerateUtils {
if (CollectionUtils.isEmpty(agg.getArgs())) {
return agg.getFunc() + "( " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
}
return agg.getFunc() + "( " + agg.getArgs().stream().map(arg ->
arg.equals(agg.getColumn()) ? arg : (StringUtils.isNumeric(arg) ? arg : ("'" + arg + "'"))
).collect(Collectors.joining(",")) + " ) AS " + agg.getColumn() + " ";
return agg.getFunc()
+ "( "
+ agg.getArgs().stream()
.map(
arg ->
arg.equals(agg.getColumn())
? arg
: (StringUtils.isNumeric(arg)
? arg
: ("'" + arg + "'")))
.collect(Collectors.joining(","))
+ " ) AS "
+ agg.getColumn()
+ " ";
}
public String getSelectField(final Aggregator agg, Map<String, String> deriveMetrics) {
@@ -143,37 +158,51 @@ public class SqlGenerateUtils {
if (CollectionUtils.isEmpty(queryParam.getOrders())) {
return "";
}
return "order by " + queryParam.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
return "order by "
+ queryParam.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
}
public String getOrderBy(QueryParam queryParam, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(queryParam.getOrders())) {
return "";
}
if (!queryParam.getOrders().stream().anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) {
if (!queryParam.getOrders().stream()
.anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) {
return getOrderBy(queryParam);
}
return "order by " + queryParam.getOrders().stream()
.map(order -> " " + (deriveMetrics.containsKey(order.getColumn()) ? deriveMetrics.get(order.getColumn())
: order.getColumn()) + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
return "order by "
+ queryParam.getOrders().stream()
.map(
order ->
" "
+ (deriveMetrics.containsKey(order.getColumn())
? deriveMetrics.get(order.getColumn())
: order.getColumn())
+ " "
+ order.getDirection()
+ " ")
.collect(Collectors.joining(","));
}
public String generateWhere(QueryParam queryParam, ItemDateResp itemDateResp) {
String whereClauseFromFilter = sqlFilterUtils.getWhereClause(queryParam.getDimensionFilters());
String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(queryParam.getDimensionFilters());
String whereFromDate = getDateWhereClause(queryParam.getDateInfo(), itemDateResp);
return mergeDateWhereClause(queryParam, whereClauseFromFilter, whereFromDate);
}
private String mergeDateWhereClause(QueryParam queryParam, String whereClauseFromFilter,
String whereFromDate) {
if (StringUtils.isNotEmpty(whereFromDate) && StringUtils.isNotEmpty(whereClauseFromFilter)) {
private String mergeDateWhereClause(
QueryParam queryParam, String whereClauseFromFilter, String whereFromDate) {
if (StringUtils.isNotEmpty(whereFromDate)
&& StringUtils.isNotEmpty(whereClauseFromFilter)) {
return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
} else if (StringUtils.isEmpty(whereFromDate) && StringUtils.isNotEmpty(whereClauseFromFilter)) {
} else if (StringUtils.isEmpty(whereFromDate)
&& StringUtils.isNotEmpty(whereClauseFromFilter)) {
return whereClauseFromFilter;
} else if (StringUtils.isNotEmpty(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) {
} else if (StringUtils.isNotEmpty(whereFromDate)
&& StringUtils.isEmpty(whereClauseFromFilter)) {
return whereFromDate;
} else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) {
log.debug("the current date information is empty, enter the date initialization logic");
@@ -185,7 +214,7 @@ public class SqlGenerateUtils {
public String getDateWhereClause(DateConf dateInfo, ItemDateResp dateDate) {
if (Objects.isNull(dateDate)
|| StringUtils.isEmpty(dateDate.getStartDate())
&& StringUtils.isEmpty(dateDate.getEndDate())) {
&& StringUtils.isEmpty(dateDate.getEndDate())) {
if (dateInfo.getDateMode().equals(DateConf.DateMode.LIST)) {
return dateModeUtils.listDateStr(dateInfo);
}
@@ -202,7 +231,8 @@ public class SqlGenerateUtils {
return dateModeUtils.getDateWhereStr(dateInfo, dateDate);
}
public Triple<String, String, String> getBeginEndTime(QueryParam queryParam, ItemDateResp dataDate) {
public Triple<String, String, String> getBeginEndTime(
QueryParam queryParam, ItemDateResp dataDate) {
if (Objects.isNull(queryParam.getDateInfo())) {
return Triple.of("", "", "");
}
@@ -216,27 +246,38 @@ public class SqlGenerateUtils {
case BETWEEN:
return Triple.of(dateInfo, dateConf.getStartDate(), dateConf.getEndDate());
case LIST:
return Triple.of(dateInfo, Collections.min(dateConf.getDateList()),
return Triple.of(
dateInfo,
Collections.min(dateConf.getDateList()),
Collections.max(dateConf.getDateList()));
case RECENT:
LocalDate dateMax = LocalDate.now().minusDays(1);
LocalDate dateMin = dateMax.minusDays(dateConf.getUnit() - 1);
if (Objects.isNull(dataDate)) {
return Triple.of(dateInfo, dateMin.format(DateTimeFormatter.ofPattern(DAY_FORMAT)),
return Triple.of(
dateInfo,
dateMin.format(DateTimeFormatter.ofPattern(DAY_FORMAT)),
dateMax.format(DateTimeFormatter.ofPattern(DAY_FORMAT)));
}
switch (dateConf.getPeriod()) {
case DAY:
ImmutablePair<String, String> dayInfo = dateModeUtils.recentDay(dataDate, dateConf);
ImmutablePair<String, String> dayInfo =
dateModeUtils.recentDay(dataDate, dateConf);
return Triple.of(dateInfo, dayInfo.left, dayInfo.right);
case WEEK:
ImmutablePair<String, String> weekInfo = dateModeUtils.recentWeek(dataDate, dateConf);
ImmutablePair<String, String> weekInfo =
dateModeUtils.recentWeek(dataDate, dateConf);
return Triple.of(dateInfo, weekInfo.left, weekInfo.right);
case MONTH:
List<ImmutablePair<String, String>> rets = dateModeUtils.recentMonth(dataDate, dateConf);
Optional<String> minBegins = rets.stream().map(i -> i.left).sorted().findFirst();
Optional<String> maxBegins = rets.stream().map(i -> i.right).sorted(Comparator.reverseOrder())
.findFirst();
List<ImmutablePair<String, String>> rets =
dateModeUtils.recentMonth(dataDate, dateConf);
Optional<String> minBegins =
rets.stream().map(i -> i.left).sorted().findFirst();
Optional<String> maxBegins =
rets.stream()
.map(i -> i.right)
.sorted(Comparator.reverseOrder())
.findFirst();
if (minBegins.isPresent() && maxBegins.isPresent()) {
return Triple.of(dateInfo, minBegins.get(), maxBegins.get());
}
@@ -247,19 +288,19 @@ public class SqlGenerateUtils {
break;
default:
break;
}
return Triple.of("", "", "");
}
public boolean isSupportWith(EngineType engineTypeEnum, String version) {
if (engineTypeEnum.equals(EngineType.MYSQL) && Objects.nonNull(version) && version.startsWith(
executorConfig.getMysqlLowVersion())) {
if (engineTypeEnum.equals(EngineType.MYSQL)
&& Objects.nonNull(version)
&& version.startsWith(executorConfig.getMysqlLowVersion())) {
return false;
}
if (engineTypeEnum.equals(EngineType.CLICKHOUSE) && Objects.nonNull(version)
&& StringUtil.compareVersion(version,
executorConfig.getCkLowVersion()) < 0) {
if (engineTypeEnum.equals(EngineType.CLICKHOUSE)
&& Objects.nonNull(version)
&& StringUtil.compareVersion(version, executorConfig.getCkLowVersion()) < 0) {
return false;
}
return true;
@@ -269,30 +310,44 @@ public class SqlGenerateUtils {
return modelBizName + UNDERLINE + executorConfig.getInternalMetricNameSuffix();
}
public String generateDerivedMetric(final List<MetricSchemaResp> metricResps, final Set<String> allFields,
final Map<String, Measure> allMeasures,
final List<DimSchemaResp> dimensionResps,
final String expression, final MetricDefineType metricDefineType,
AggOption aggOption,
Set<String> visitedMetric,
Set<String> measures,
Set<String> dimensions) {
public String generateDerivedMetric(
final List<MetricSchemaResp> metricResps,
final Set<String> allFields,
final Map<String, Measure> allMeasures,
final List<DimSchemaResp> dimensionResps,
final String expression,
final MetricDefineType metricDefineType,
AggOption aggOption,
Set<String> visitedMetric,
Set<String> measures,
Set<String> dimensions) {
Set<String> fields = SqlSelectHelper.getColumnFromExpr(expression);
if (!CollectionUtils.isEmpty(fields)) {
Map<String, String> replace = new HashMap<>();
for (String field : fields) {
switch (metricDefineType) {
case METRIC:
Optional<MetricSchemaResp> metricItem = metricResps.stream()
.filter(m -> m.getBizName().equalsIgnoreCase(field)).findFirst();
Optional<MetricSchemaResp> metricItem =
metricResps.stream()
.filter(m -> m.getBizName().equalsIgnoreCase(field))
.findFirst();
if (metricItem.isPresent()) {
if (visitedMetric.contains(field)) {
break;
}
replace.put(field,
generateDerivedMetric(metricResps, allFields, allMeasures, dimensionResps,
getExpr(metricItem.get()), metricItem.get().getMetricDefineType(),
aggOption, visitedMetric, measures, dimensions));
replace.put(
field,
generateDerivedMetric(
metricResps,
allFields,
allMeasures,
dimensionResps,
getExpr(metricItem.get()),
metricItem.get().getMetricDefineType(),
aggOption,
visitedMetric,
measures,
dimensions));
visitedMetric.add(field);
}
break;
@@ -304,8 +359,10 @@ public class SqlGenerateUtils {
break;
case FIELD:
if (allFields.contains(field)) {
Optional<DimSchemaResp> dimensionItem = dimensionResps.stream()
.filter(d -> d.getBizName().equals(field)).findFirst();
Optional<DimSchemaResp> dimensionItem =
dimensionResps.stream()
.filter(d -> d.getBizName().equals(field))
.findFirst();
if (dimensionItem.isPresent()) {
dimensions.add(field);
} else {
@@ -315,7 +372,6 @@ public class SqlGenerateUtils {
break;
default:
break;
}
}
if (!CollectionUtils.isEmpty(replace)) {
@@ -329,12 +385,17 @@ public class SqlGenerateUtils {
public String getExpr(Measure measure, AggOption aggOption) {
if (AggOperatorEnum.COUNT_DISTINCT.getOperator().equalsIgnoreCase(measure.getAgg())) {
return AggOption.NATIVE.equals(aggOption) ? measure.getBizName()
: AggOperatorEnum.COUNT.getOperator() + " ( " + AggOperatorEnum.DISTINCT + " "
+ measure.getBizName()
+ " ) ";
return AggOption.NATIVE.equals(aggOption)
? measure.getBizName()
: AggOperatorEnum.COUNT.getOperator()
+ " ( "
+ AggOperatorEnum.DISTINCT
+ " "
+ measure.getBizName()
+ " ) ";
}
return AggOption.NATIVE.equals(aggOption) ? measure.getBizName()
return AggOption.NATIVE.equals(aggOption)
? measure.getBizName()
: measure.getAgg() + " ( " + measure.getBizName() + " ) ";
}
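The measure-to-expression mapping above has three cases: NATIVE aggregation passes the raw column through, COUNT_DISTINCT is rewritten as COUNT(DISTINCT ...), and everything else becomes agg(column). A self-contained sketch over plain strings (AggOption is reduced to a boolean flag here, an assumption for illustration):

public class MeasureExprSketch {
    // Mirrors SqlGenerateUtils.getExpr: NATIVE keeps the raw column,
    // COUNT_DISTINCT becomes COUNT ( DISTINCT col ), anything else agg ( col ).
    static String getExpr(String bizName, String agg, boolean nativeAgg) {
        if (nativeAgg) {
            return bizName;
        }
        if ("COUNT_DISTINCT".equalsIgnoreCase(agg)) {
            return "COUNT ( DISTINCT " + bizName + " ) ";
        }
        return agg + " ( " + bizName + " ) ";
    }

    public static void main(String[] args) {
        System.out.println(getExpr("user_id", "COUNT_DISTINCT", false)); // COUNT ( DISTINCT user_id )
        System.out.println(getExpr("pv", "SUM", false));                 // SUM ( pv )
        System.out.println(getExpr("pv", "SUM", true));                  // pv
    }
}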

View File

@@ -1,5 +1,7 @@
package com.tencent.supersonic.headless.core.utils;
import javax.sql.DataSource;
import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.enums.DataType;
@@ -13,7 +15,6 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.rmi.ServerException;
import java.sql.Connection;
import java.sql.ResultSet;
@@ -30,18 +31,14 @@ import java.util.Map;
import static com.tencent.supersonic.common.pojo.Constants.AT_SYMBOL;
/**
 * Utility functions for SQL queries.
 */
/** Utility functions for SQL queries. */
@Slf4j
@Component
public class SqlUtils {
@Getter
private Database database;
@Getter private Database database;
@Autowired
private JdbcDataSource jdbcDataSource;
@Autowired private JdbcDataSource jdbcDataSource;
@Value("${s2.source.result-limit:1000000}")
private int resultLimit;
@@ -49,15 +46,11 @@ public class SqlUtils {
@Value("${s2.source.enable-query-log:false}")
private boolean isQueryLogEnable;
@Getter
private DataType dataTypeEnum;
@Getter private DataType dataTypeEnum;
@Getter
private JdbcDataSourceUtils jdbcDataSourceUtils;
@Getter private JdbcDataSourceUtils jdbcDataSourceUtils;
public SqlUtils() {
}
public SqlUtils() {}
public SqlUtils(Database database) {
this.database = database;
@@ -65,8 +58,7 @@ public class SqlUtils {
}
public SqlUtils init(Database database) {
return SqlUtilsBuilder
.getBuilder()
return SqlUtilsBuilder.getBuilder()
.withName(database.getId() + AT_SYMBOL + database.getName())
.withType(database.getType())
.withJdbcUrl(database.getUrl())
@@ -113,25 +105,27 @@ public class SqlUtils {
getResult(sql, queryResultWithColumns, jdbcTemplate());
}
private SemanticQueryResp getResult(String sql, SemanticQueryResp queryResultWithColumns,
JdbcTemplate jdbcTemplate) {
jdbcTemplate.query(sql, rs -> {
if (null == rs) {
return queryResultWithColumns;
}
private SemanticQueryResp getResult(
String sql, SemanticQueryResp queryResultWithColumns, JdbcTemplate jdbcTemplate) {
jdbcTemplate.query(
sql,
rs -> {
if (null == rs) {
return queryResultWithColumns;
}
ResultSetMetaData metaData = rs.getMetaData();
List<QueryColumn> queryColumns = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
String key = metaData.getColumnLabel(i);
queryColumns.add(new QueryColumn(key, metaData.getColumnTypeName(i)));
}
queryResultWithColumns.setColumns(queryColumns);
ResultSetMetaData metaData = rs.getMetaData();
List<QueryColumn> queryColumns = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
String key = metaData.getColumnLabel(i);
queryColumns.add(new QueryColumn(key, metaData.getColumnTypeName(i)));
}
queryResultWithColumns.setColumns(queryColumns);
List<Map<String, Object>> resultList = getAllData(rs, queryColumns);
queryResultWithColumns.setResultList(resultList);
return queryResultWithColumns;
});
List<Map<String, Object>> resultList = getAllData(rs, queryColumns);
queryResultWithColumns.setResultList(resultList);
return queryResultWithColumns;
});
return queryResultWithColumns;
}
@@ -147,7 +141,8 @@ public class SqlUtils {
return data;
}
private Map<String, Object> getLineData(ResultSet rs, List<QueryColumn> queryColumns) throws SQLException {
private Map<String, Object> getLineData(ResultSet rs, List<QueryColumn> queryColumns)
throws SQLException {
Map<String, Object> map = new LinkedHashMap<>();
for (QueryColumn queryColumn : queryColumns) {
String colName = queryColumn.getNameEn();
@@ -184,9 +179,7 @@ public class SqlUtils {
private String username;
private String password;
private SqlUtilsBuilder() {
}
private SqlUtilsBuilder() {}
public static SqlUtilsBuilder getBuilder() {
return new SqlUtilsBuilder();
@@ -233,13 +226,14 @@ public class SqlUtils {
}
public SqlUtils build() {
Database database = Database.builder()
.name(this.name)
.type(this.type)
.url(this.jdbcUrl)
.username(this.username)
.password(this.password)
.build();
Database database =
Database.builder()
.name(this.name)
.type(this.type)
.url(this.jdbcUrl)
.username(this.username)
.password(this.password)
.build();
SqlUtils sqlUtils = new SqlUtils(database);
sqlUtils.jdbcDataSource = this.jdbcDataSource;
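The getResult extractor above walks ResultSetMetaData once to build column descriptors, then produces one ordered map per row. The same shape can be sketched without Spring, using plain JDBC against in-memory H2 (the H2 driver on the classpath is an assumption):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ResultExtractSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT 1 AS pv, 'home' AS page")) {
            ResultSetMetaData metaData = rs.getMetaData();
            // Column labels and type names, as in SqlUtils.getResult.
            List<String> columns = new ArrayList<>();
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                columns.add(metaData.getColumnLabel(i) + ":" + metaData.getColumnTypeName(i));
            }
            // One ordered map per row, as in getLineData.
            List<Map<String, Object>> rows = new ArrayList<>();
            while (rs.next()) {
                Map<String, Object> row = new LinkedHashMap<>();
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    row.put(metaData.getColumnLabel(i), rs.getObject(i));
                }
                rows.add(row);
            }
            System.out.println(columns);
            System.out.println(rows);
        }
    }
}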

View File

@@ -8,6 +8,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;
import org.stringtemplate.v4.ST;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -15,14 +16,16 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.COMMA;
import static com.tencent.supersonic.common.pojo.Constants.EMPTY;
@Slf4j
public class SqlVariableParseUtils {
public static final String REG_SENSITIVE_SQL = "drop\\s|alter\\s|grant\\s|insert\\s|replace\\s|delete\\s|"
+ "truncate\\s|update\\s|remove\\s";
public static final String REG_SENSITIVE_SQL =
"drop\\s|alter\\s|grant\\s|insert\\s|replace\\s|delete\\s|"
+ "truncate\\s|update\\s|remove\\s";
public static final Pattern PATTERN_SENSITIVE_SQL = Pattern.compile(REG_SENSITIVE_SQL);
public static final String APOSTROPHE = "'";
@@ -34,33 +37,39 @@ public class SqlVariableParseUtils {
if (CollectionUtils.isEmpty(sqlVariables)) {
return sql;
}
//1. handle default variable value
sqlVariables.forEach(variable -> {
variables.put(variable.getName().trim(),
getValues(variable.getValueType(), variable.getDefaultValues()));
});
// 1. handle default variable value
sqlVariables.forEach(
variable -> {
variables.put(
variable.getName().trim(),
getValues(variable.getValueType(), variable.getDefaultValues()));
});
//override by variable param
// override by variable param
if (!CollectionUtils.isEmpty(params)) {
Map<String, List<SqlVariable>> map =
sqlVariables.stream().collect(Collectors.groupingBy(SqlVariable::getName));
params.forEach(p -> {
if (map.containsKey(p.getName())) {
List<SqlVariable> list = map.get(p.getName());
if (!CollectionUtils.isEmpty(list)) {
SqlVariable v = list.get(list.size() - 1);
variables.put(p.getName().trim(), getValue(v.getValueType(), p.getValue()));
}
}
});
params.forEach(
p -> {
if (map.containsKey(p.getName())) {
List<SqlVariable> list = map.get(p.getName());
if (!CollectionUtils.isEmpty(list)) {
SqlVariable v = list.get(list.size() - 1);
variables.put(
p.getName().trim(),
getValue(v.getValueType(), p.getValue()));
}
}
});
}
variables.forEach((k, v) -> {
if (v instanceof List && ((List) v).size() > 0) {
v = ((List) v).stream().collect(Collectors.joining(COMMA)).toString();
}
variables.put(k, v);
});
variables.forEach(
(k, v) -> {
if (v instanceof List && ((List) v).size() > 0) {
v = ((List) v).stream().collect(Collectors.joining(COMMA)).toString();
}
variables.put(k, v);
});
return parse(sql, variables);
}
@@ -79,12 +88,18 @@ public class SqlVariableParseUtils {
if (null != valueType) {
switch (valueType) {
case STRING:
return values.stream().map(String::valueOf)
.map(s -> s.startsWith(APOSTROPHE) && s.endsWith(APOSTROPHE)
? s : String.join(EMPTY, APOSTROPHE, s, APOSTROPHE))
return values.stream()
.map(String::valueOf)
.map(
s ->
s.startsWith(APOSTROPHE) && s.endsWith(APOSTROPHE)
? s
: String.join(EMPTY, APOSTROPHE, s, APOSTROPHE))
.collect(Collectors.toList());
case EXPR:
values.stream().map(String::valueOf).forEach(SqlVariableParseUtils::checkSensitiveSql);
values.stream()
.map(String::valueOf)
.forEach(SqlVariableParseUtils::checkSensitiveSql);
return values.stream().map(String::valueOf).collect(Collectors.toList());
case NUMBER:
return values.stream().map(String::valueOf).collect(Collectors.toList());
@@ -100,8 +115,11 @@ public class SqlVariableParseUtils {
if (null != valueType) {
switch (valueType) {
case STRING:
return String.join(EMPTY, value.startsWith(APOSTROPHE) ? EMPTY : APOSTROPHE,
value, value.endsWith(APOSTROPHE) ? EMPTY : APOSTROPHE);
return String.join(
EMPTY,
value.startsWith(APOSTROPHE) ? EMPTY : APOSTROPHE,
value,
value.endsWith(APOSTROPHE) ? EMPTY : APOSTROPHE);
case NUMBER:
case EXPR:
default:
@@ -117,8 +135,8 @@ public class SqlVariableParseUtils {
if (matcher.find()) {
String group = matcher.group();
log.warn("Sensitive SQL operations are not allowed: {}", group.toUpperCase());
throw new InvalidArgumentException("Sensitive SQL operations are not allowed: " + group.toUpperCase());
throw new InvalidArgumentException(
"Sensitive SQL operations are not allowed: " + group.toUpperCase());
}
}
}
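Two behaviors above are easy to verify in isolation: the sensitive-SQL guard rejects DDL/DML keywords, and STRING-typed variable values are wrapped in apostrophes unless already quoted. A self-contained sketch of both (a simplification: the real class also logs a warning and uses its own exception type):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VariableParseSketch {
    static final Pattern SENSITIVE = Pattern.compile(
            "drop\\s|alter\\s|grant\\s|insert\\s|replace\\s|delete\\s|"
                    + "truncate\\s|update\\s|remove\\s");

    // Throws on sensitive operations, mirroring checkSensitiveSql above.
    static void checkSensitiveSql(String sql) {
        Matcher matcher = SENSITIVE.matcher(sql.toLowerCase());
        if (matcher.find()) {
            throw new IllegalArgumentException(
                    "Sensitive SQL operations are not allowed: "
                            + matcher.group().toUpperCase());
        }
    }

    // Wraps a STRING-typed value in apostrophes unless already quoted,
    // mirroring getValue above.
    static String quote(String value) {
        String apostrophe = "'";
        return String.join("",
                value.startsWith(apostrophe) ? "" : apostrophe,
                value,
                value.endsWith(apostrophe) ? "" : apostrophe);
    }

    public static void main(String[] args) {
        System.out.println(quote("home"));   // 'home'
        System.out.println(quote("'home'")); // 'home'
        checkSensitiveSql("select * from t");          // passes
        // checkSensitiveSql("drop table t");          // would throw
    }
}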

View File

@@ -1,9 +1,9 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import lombok.extern.slf4j.Slf4j;
@@ -11,15 +11,14 @@ import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Utilities for generating system time dimensions.
 */
/** Utilities for generating system time dimensions. */
@Slf4j
public class SysTimeDimensionBuilder {
// Defines the regular expression pattern for the time keyword
private static final Pattern TIME_KEYWORD_PATTERN =
Pattern.compile("\\b(DATE|TIME|TIMESTAMP|YEAR|MONTH|DAY|HOUR|MINUTE|SECOND)\\b",
Pattern.compile(
"\\b(DATE|TIME|TIMESTAMP|YEAR|MONTH|DAY|HOUR|MINUTE|SECOND)\\b",
Pattern.CASE_INSENSITIVE);
public static void addSysTimeDimension(List<Dim> dims, DbAdaptor engineAdaptor) {
@@ -27,7 +26,7 @@ public class SysTimeDimensionBuilder {
Dim timeDim = getTimeDim(dims);
if (timeDim == null) {
timeDim = Dim.getDefault();
//TODO: time dimension not found
// TODO: time dimension not found
return;
}
dims.add(generateSysDayDimension(timeDim, engineAdaptor));
@@ -40,7 +39,9 @@ public class SysTimeDimensionBuilder {
Dim dim = new Dim();
dim.setBizName(TimeDimensionEnum.DAY.getName());
dim.setType(DimensionType.partition_time.name());
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.DAY.name().toLowerCase(), engineAdaptor));
dim.setExpr(
generateTimeExpr(
timeDim, TimeDimensionEnum.DAY.name().toLowerCase(), engineAdaptor));
DimensionTimeTypeParams typeParams = new DimensionTimeTypeParams();
typeParams.setTimeGranularity(TimeDimensionEnum.DAY.name().toLowerCase());
typeParams.setIsPrimary("true");
@@ -52,7 +53,9 @@ public class SysTimeDimensionBuilder {
Dim dim = new Dim();
dim.setBizName(TimeDimensionEnum.WEEK.getName());
dim.setType(DimensionType.partition_time.name());
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.WEEK.name().toLowerCase(), engineAdaptor));
dim.setExpr(
generateTimeExpr(
timeDim, TimeDimensionEnum.WEEK.name().toLowerCase(), engineAdaptor));
DimensionTimeTypeParams typeParams = new DimensionTimeTypeParams();
typeParams.setTimeGranularity(TimeDimensionEnum.WEEK.name().toLowerCase());
typeParams.setIsPrimary("false");
@@ -64,7 +67,9 @@ public class SysTimeDimensionBuilder {
Dim dim = new Dim();
dim.setBizName(TimeDimensionEnum.MONTH.getName());
dim.setType(DimensionType.partition_time.name());
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.MONTH.name().toLowerCase(), engineAdaptor));
dim.setExpr(
generateTimeExpr(
timeDim, TimeDimensionEnum.MONTH.name().toLowerCase(), engineAdaptor));
DimensionTimeTypeParams typeParams = new DimensionTimeTypeParams();
typeParams.setTimeGranularity(TimeDimensionEnum.MONTH.name().toLowerCase());
typeParams.setIsPrimary("false");
@@ -87,7 +92,6 @@ public class SysTimeDimensionBuilder {
} else {
return engineAdaptor.getDateFormat(dateType, dateFormat, bizName);
}
}
private static Dim getTimeDim(List<Dim> timeDims) {
@@ -98,5 +102,4 @@ public class SysTimeDimensionBuilder {
}
return null;
}
}
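addSysTimeDimension above derives day, week, and month dimensions from one base time dimension, delegating the engine-specific date expression to the DbAdaptor. A minimal sketch of that delegation with a stand-in functional interface (the interface shape and the toy to_day/to_week/to_month output are assumptions; real adaptors emit engine-specific SQL):

import java.util.ArrayList;
import java.util.List;

public class SysTimeDimSketch {
    // Minimal stand-in for DbAdaptor.getDateFormat.
    interface DateFormatter {
        String getDateFormat(String dateType, String dateFormat, String column);
    }

    public static void main(String[] args) {
        DateFormatter adaptor = (dateType, dateFormat, column) ->
                "to_" + dateType.toLowerCase() + "(" + column + ")";
        List<String> exprs = new ArrayList<>();
        // One derived expression per granularity, as in the builder above.
        for (String granularity : new String[] {"DAY", "WEEK", "MONTH"}) {
            exprs.add(adaptor.getDateFormat(granularity, "yyyy-MM-dd", "imp_date"));
        }
        System.out.println(exprs); // [to_day(imp_date), to_week(imp_date), to_month(imp_date)]
    }
}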

View File

@@ -2,8 +2,8 @@ package com.tencent.supersonic.chat.core.parser.aggregate;
import com.alibaba.fastjson.JSON;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.translator.calcite.CalciteQueryParser;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.CalciteQueryParser;
import org.junit.jupiter.api.Test;
import org.testng.Assert;
@@ -11,460 +11,462 @@ public class CalciteSqlParserTest {
@Test
public void testCalciteSqlParser() throws Exception {
String json = "{\n"
+ " \"dataSetId\": 1,\n"
+ " \"sql\": \"\",\n"
+ " \"sourceId\": \"\",\n"
+ " \"errMsg\": \"\",\n"
+ " \"metricQueryParam\": {\n"
+ " \"metrics\": [\n"
+ " \"pv\"\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " \"sys_imp_date\"\n"
+ " ],\n"
+ " \"nativeQuery\": false\n"
+ " },\n"
+ " \"status\": 0,\n"
+ " \"isS2SQL\": false,\n"
+ " \"enableOptimize\": true,\n"
+ " \"minMaxTime\": {\n"
+ " \"left\": \"sys_imp_date\",\n"
+ " \"middle\": \"2024-03-24\",\n"
+ " \"right\": \"2024-03-18\"\n"
+ " },\n"
+ " \"dataSetSql\": \"SELECT sys_imp_date, SUM(pv) AS pv FROM t_1 WHERE "
+ "sys_imp_date >= '2024-03-18' AND sys_imp_date <= '2024-03-24' GROUP BY sys_imp_date LIMIT 365\",\n"
+ " \"dataSetAlias\": \"t_1\",\n"
+ " \"dataSetSimplifySql\": \"\",\n"
+ " \"enableLimitWrapper\": false,\n"
+ " \"semanticModel\": {\n"
+ " \"schemaKey\": \"VIEW_1\",\n"
+ " \"metrics\": [\n"
+ " {\n"
+ " \"name\": \"pv\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"ATOMIC\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_pv\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"constraint\": \"\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": false,\n"
+ " \"expr\": \"s2_pv_uv_statis_pv\"\n"
+ " }\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"uv\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"DERIVED\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"user_id\",\n"
+ " \"expr\": \"user_id\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": true,\n"
+ " \"expr\": \"user_id\"\n"
+ " }\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"pv_avg\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"DERIVED\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"pv\",\n"
+ " \"expr\": \"pv\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"uv\",\n"
+ " \"expr\": \"uv\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": true,\n"
+ " \"expr\": \"pv\"\n"
+ " }\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"stay_hours\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"ATOMIC\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_stay_time_statis_stay_hours\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"constraint\": \"\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": false,\n"
+ " \"expr\": \"s2_stay_time_statis_stay_hours\"\n"
+ " }\n"
+ " }\n"
+ " ],\n"
+ " \"datasourceMap\": {\n"
+ " \"user_department\": {\n"
+ " \"id\": 1,\n"
+ " \"name\": \"user_department\",\n"
+ " \"sourceId\": 1,\n"
+ " \"type\": \"h2\",\n"
+ " \"sqlQuery\": \"select user_name,department from s2_user_department\",\n"
+ " \"identifiers\": [\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"type\": \"primary\"\n"
+ " }\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " {\n"
+ " \"name\": \"department\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"department\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"department\"\n"
+ " }\n"
+ " ],\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"user_department_internal_cnt\",\n"
+ " \"agg\": \"count\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"department\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"department\"\n"
+ " }\n"
+ " ],\n"
+ " \"aggTime\": \"none\"\n"
+ " },\n"
+ " \"s2_pv_uv_statis\": {\n"
+ " \"id\": 2,\n"
+ " \"name\": \"s2_pv_uv_statis\",\n"
+ " \"sourceId\": 1,\n"
+ " \"type\": \"h2\",\n"
+ " \"sqlQuery\": \"SELECT imp_date, user_name, page, 1 as pv, user_name as user_id "
+ "FROM s2_pv_uv_statis\",\n"
+ " \"identifiers\": [\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"type\": \"primary\"\n"
+ " }\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"page\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_week\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"DATE_TRUNC('week',imp_date)\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"week\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_week\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_month\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"FORMATDATETIME(PARSEDATETIME"
+ "(imp_date, 'yyyy-MM-dd'),'yyyy-MM') \",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"month\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_month\"\n"
+ " }\n"
+ " ],\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_pv\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"expr\": \"pv\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_user_id\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"expr\": \"user_id\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_internal_cnt\",\n"
+ " \"agg\": \"count\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"pv\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"pv\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_id\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_id\"\n"
+ " }\n"
+ " ],\n"
+ " \"aggTime\": \"day\"\n"
+ " },\n"
+ " \"s2_stay_time_statis\": {\n"
+ " \"id\": 3,\n"
+ " \"name\": \"s2_stay_time_statis\",\n"
+ " \"sourceId\": 1,\n"
+ " \"type\": \"h2\",\n"
+ " \"sqlQuery\": \"select imp_date,user_name,stay_hours"
+ ",page from s2_stay_time_statis\",\n"
+ " \"identifiers\": [\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"type\": \"primary\"\n"
+ " }\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"page\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_week\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"DATE_TRUNC('week',imp_date)\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"week\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_week\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_month\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"FORMATDATETIME(PARSEDATETIME"
+ "(imp_date, 'yyyy-MM-dd'),'yyyy-MM') \",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"month\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_month\"\n"
+ " }\n"
+ " ],\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_stay_time_statis_stay_hours\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"expr\": \"stay_hours\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"s2_stay_time_statis_internal_cnt\",\n"
+ " \"agg\": \"count\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"stay_hours\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"stay_hours\"\n"
+ " }\n"
+ " ],\n"
+ " \"aggTime\": \"day\"\n"
+ " }\n"
+ " },\n"
+ " \"dimensionMap\": {\n"
+ " \"user_department\": [\n"
+ " {\n"
+ " \"name\": \"department\",\n"
+ " \"owners\": \"admin\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"department\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"department\"\n"
+ " }\n"
+ " ],\n"
+ " \"s2_pv_uv_statis\": [\n"
+ " ],\n"
+ " \"s2_stay_time_statis\": [\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"owners\": \"admin\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"page\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"page\"\n"
+ " }\n"
+ " ]\n"
+ " },\n"
+ " \"materializationList\": [\n"
+ " ],\n"
+ " \"joinRelations\": [\n"
+ " {\n"
+ " \"id\": 1,\n"
+ " \"left\": \"user_department\",\n"
+ " \"right\": \"s2_pv_uv_statis\",\n"
+ " \"joinType\": \"left join\",\n"
+ " \"joinCondition\": [\n"
+ " {\n"
+ " \"left\": \"user_name\",\n"
+ " \"middle\": \"=\",\n"
+ " \"right\": \"user_name\"\n"
+ " }\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"id\": 2,\n"
+ " \"left\": \"user_department\",\n"
+ " \"right\": \"s2_stay_time_statis\",\n"
+ " \"joinType\": \"left join\",\n"
+ " \"joinCondition\": [\n"
+ " {\n"
+ " \"left\": \"user_name\",\n"
+ " \"middle\": \"=\",\n"
+ " \"right\": \"user_name\"\n"
+ " }\n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"database\": {\n"
+ " \"id\": 1,\n"
+ " \"name\": \"数据实例\",\n"
+ " \"description\": \"样例数据实例\",\n"
+ " \"url\": \"jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false\",\n"
+ " \"username\": \"root\",\n"
+ " \"password\": \"semantic\",\n"
+ " \"type\": \"h2\",\n"
+ " \"connectInfo\": {\n"
+ " \"url\": \"jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false\",\n"
+ " \"userName\": \"root\",\n"
+ " \"password\": \"semantic\"\n"
+ " },\n"
+ " \"admins\": [\n"
+ " ],\n"
+ " \"viewers\": [\n"
+ " ],\n"
+ " \"createdBy\": \"admin\",\n"
+ " \"updatedBy\": \"admin\",\n"
+ " \"createdAt\": 1711367511146,\n"
+ " \"updatedAt\": 1711367511146\n"
+ " }\n"
+ " }\n"
+ "}";
String json =
"{\n"
+ " \"dataSetId\": 1,\n"
+ " \"sql\": \"\",\n"
+ " \"sourceId\": \"\",\n"
+ " \"errMsg\": \"\",\n"
+ " \"metricQueryParam\": {\n"
+ " \"metrics\": [\n"
+ " \"pv\"\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " \"sys_imp_date\"\n"
+ " ],\n"
+ " \"nativeQuery\": false\n"
+ " },\n"
+ " \"status\": 0,\n"
+ " \"isS2SQL\": false,\n"
+ " \"enableOptimize\": true,\n"
+ " \"minMaxTime\": {\n"
+ " \"left\": \"sys_imp_date\",\n"
+ " \"middle\": \"2024-03-24\",\n"
+ " \"right\": \"2024-03-18\"\n"
+ " },\n"
+ " \"dataSetSql\": \"SELECT sys_imp_date, SUM(pv) AS pv FROM t_1 WHERE "
+ "sys_imp_date >= '2024-03-18' AND sys_imp_date <= '2024-03-24' GROUP BY sys_imp_date LIMIT 365\",\n"
+ " \"dataSetAlias\": \"t_1\",\n"
+ " \"dataSetSimplifySql\": \"\",\n"
+ " \"enableLimitWrapper\": false,\n"
+ " \"semanticModel\": {\n"
+ " \"schemaKey\": \"VIEW_1\",\n"
+ " \"metrics\": [\n"
+ " {\n"
+ " \"name\": \"pv\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"ATOMIC\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_pv\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"constraint\": \"\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": false,\n"
+ " \"expr\": \"s2_pv_uv_statis_pv\"\n"
+ " }\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"uv\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"DERIVED\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"user_id\",\n"
+ " \"expr\": \"user_id\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": true,\n"
+ " \"expr\": \"user_id\"\n"
+ " }\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"pv_avg\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"DERIVED\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"pv\",\n"
+ " \"expr\": \"pv\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"uv\",\n"
+ " \"expr\": \"uv\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": true,\n"
+ " \"expr\": \"pv\"\n"
+ " }\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"stay_hours\",\n"
+ " \"owners\": [\n"
+ " \"admin\"\n"
+ " ],\n"
+ " \"type\": \"ATOMIC\",\n"
+ " \"metricTypeParams\": {\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_stay_time_statis_stay_hours\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"constraint\": \"\"\n"
+ " }\n"
+ " ],\n"
+ " \"isFieldMetric\": false,\n"
+ " \"expr\": \"s2_stay_time_statis_stay_hours\"\n"
+ " }\n"
+ " }\n"
+ " ],\n"
+ " \"datasourceMap\": {\n"
+ " \"user_department\": {\n"
+ " \"id\": 1,\n"
+ " \"name\": \"user_department\",\n"
+ " \"sourceId\": 1,\n"
+ " \"type\": \"h2\",\n"
+ " \"sqlQuery\": \"select user_name,department from s2_user_department\",\n"
+ " \"identifiers\": [\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"type\": \"primary\"\n"
+ " }\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " {\n"
+ " \"name\": \"department\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"department\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"department\"\n"
+ " }\n"
+ " ],\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"user_department_internal_cnt\",\n"
+ " \"agg\": \"count\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"department\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"department\"\n"
+ " }\n"
+ " ],\n"
+ " \"aggTime\": \"none\"\n"
+ " },\n"
+ " \"s2_pv_uv_statis\": {\n"
+ " \"id\": 2,\n"
+ " \"name\": \"s2_pv_uv_statis\",\n"
+ " \"sourceId\": 1,\n"
+ " \"type\": \"h2\",\n"
+ " \"sqlQuery\": \"SELECT imp_date, user_name, page, 1 as pv, user_name as user_id "
+ "FROM s2_pv_uv_statis\",\n"
+ " \"identifiers\": [\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"type\": \"primary\"\n"
+ " }\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"page\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_week\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"DATE_TRUNC('week',imp_date)\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"week\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_week\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_month\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"FORMATDATETIME(PARSEDATETIME"
+ "(imp_date, 'yyyy-MM-dd'),'yyyy-MM') \",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"month\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_month\"\n"
+ " }\n"
+ " ],\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_pv\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"expr\": \"pv\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_user_id\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"expr\": \"user_id\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"s2_pv_uv_statis_internal_cnt\",\n"
+ " \"agg\": \"count\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"pv\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"pv\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_id\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_id\"\n"
+ " }\n"
+ " ],\n"
+ " \"aggTime\": \"day\"\n"
+ " },\n"
+ " \"s2_stay_time_statis\": {\n"
+ " \"id\": 3,\n"
+ " \"name\": \"s2_stay_time_statis\",\n"
+ " \"sourceId\": 1,\n"
+ " \"type\": \"h2\",\n"
+ " \"sqlQuery\": \"select imp_date,user_name,stay_hours"
+ ",page from s2_stay_time_statis\",\n"
+ " \"identifiers\": [\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"type\": \"primary\"\n"
+ " }\n"
+ " ],\n"
+ " \"dimensions\": [\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"page\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_date\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"imp_date\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"true\",\n"
+ " \"timeGranularity\": \"day\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_week\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"DATE_TRUNC('week',imp_date)\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"week\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_week\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"sys_imp_month\",\n"
+ " \"type\": \"time\",\n"
+ " \"expr\": \"FORMATDATETIME(PARSEDATETIME"
+ "(imp_date, 'yyyy-MM-dd'),'yyyy-MM') \",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " \"isPrimary\": \"false\",\n"
+ " \"timeGranularity\": \"month\"\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"sys_imp_month\"\n"
+ " }\n"
+ " ],\n"
+ " \"measures\": [\n"
+ " {\n"
+ " \"name\": \"s2_stay_time_statis_stay_hours\",\n"
+ " \"agg\": \"SUM\",\n"
+ " \"expr\": \"stay_hours\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"s2_stay_time_statis_internal_cnt\",\n"
+ " \"agg\": \"count\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"user_name\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"user_name\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"imp_date\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"imp_date\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"page\"\n"
+ " },\n"
+ " {\n"
+ " \"name\": \"stay_hours\",\n"
+ " \"agg\": \"\",\n"
+ " \"expr\": \"stay_hours\"\n"
+ " }\n"
+ " ],\n"
+ " \"aggTime\": \"day\"\n"
+ " }\n"
+ " },\n"
+ " \"dimensionMap\": {\n"
+ " \"user_department\": [\n"
+ " {\n"
+ " \"name\": \"department\",\n"
+ " \"owners\": \"admin\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"department\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"department\"\n"
+ " }\n"
+ " ],\n"
+ " \"s2_pv_uv_statis\": [\n"
+ " ],\n"
+ " \"s2_stay_time_statis\": [\n"
+ " {\n"
+ " \"name\": \"page\",\n"
+ " \"owners\": \"admin\",\n"
+ " \"type\": \"categorical\",\n"
+ " \"expr\": \"page\",\n"
+ " \"dimensionTimeTypeParams\": {\n"
+ " },\n"
+ " \"dataType\": \"UNKNOWN\",\n"
+ " \"bizName\": \"page\"\n"
+ " }\n"
+ " ]\n"
+ " },\n"
+ " \"materializationList\": [\n"
+ " ],\n"
+ " \"joinRelations\": [\n"
+ " {\n"
+ " \"id\": 1,\n"
+ " \"left\": \"user_department\",\n"
+ " \"right\": \"s2_pv_uv_statis\",\n"
+ " \"joinType\": \"left join\",\n"
+ " \"joinCondition\": [\n"
+ " {\n"
+ " \"left\": \"user_name\",\n"
+ " \"middle\": \"=\",\n"
+ " \"right\": \"user_name\"\n"
+ " }\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"id\": 2,\n"
+ " \"left\": \"user_department\",\n"
+ " \"right\": \"s2_stay_time_statis\",\n"
+ " \"joinType\": \"left join\",\n"
+ " \"joinCondition\": [\n"
+ " {\n"
+ " \"left\": \"user_name\",\n"
+ " \"middle\": \"=\",\n"
+ " \"right\": \"user_name\"\n"
+ " }\n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"database\": {\n"
+ " \"id\": 1,\n"
+ " \"name\": \"数据实例\",\n"
+ " \"description\": \"样例数据库实例\",\n"
+ " \"url\": \"jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false\",\n"
+ " \"username\": \"root\",\n"
+ " \"password\": \"semantic\",\n"
+ " \"type\": \"h2\",\n"
+ " \"connectInfo\": {\n"
+ " \"url\": \"jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false\",\n"
+ " \"userName\": \"root\",\n"
+ " \"password\": \"semantic\"\n"
+ " },\n"
+ " \"admins\": [\n"
+ " ],\n"
+ " \"viewers\": [\n"
+ " ],\n"
+ " \"createdBy\": \"admin\",\n"
+ " \"updatedBy\": \"admin\",\n"
+ " \"createdAt\": 1711367511146,\n"
+ " \"updatedAt\": 1711367511146\n"
+ " }\n"
+ " }\n"
+ "}";
QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class);
CalciteQueryParser calciteSqlParser = new CalciteQueryParser();
calciteSqlParser.parse(queryStatement, AggOption.DEFAULT);
Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""),
Assert.assertEquals(
queryStatement.getSql().trim().replaceAll("\\s+", ""),
"SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`"
+ "FROM"
+ "`s2_pv_uv_statis`"
+ "GROUPBY`imp_date`,`imp_date`");
+ "FROM"
+ "`s2_pv_uv_statis`"
+ "GROUPBY`imp_date`,`imp_date`");
}
}
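The assertion compares the generated SQL after trimming and stripping all whitespace, which keeps the test insensitive to the formatter's line breaks and indentation. A minimal sketch of that normalization trick:

public class SqlCompareSketch {
    // Whitespace-insensitive comparison, as used in the assertion above.
    static boolean sameSql(String a, String b) {
        return a.trim().replaceAll("\\s+", "")
                .equals(b.trim().replaceAll("\\s+", ""));
    }

    public static void main(String[] args) {
        String pretty = "SELECT `imp_date` AS `sys_imp_date`,\n"
                + "  SUM(1) AS `pv`\n"
                + "FROM `s2_pv_uv_statis`";
        String compact = "SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`FROM`s2_pv_uv_statis`";
        System.out.println(sameSql(pretty, compact)); // true
    }
}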