Mirror of https://github.com/tencentmusic/supersonic.git
[improvement][project] supersonic 0.7.0 version backend update (#24)
* [improvement][project] supersonic 0.7.0 version backend update
* [improvement][project] supersonic 0.7.0 version readme update

Co-authored-by: jolunoluo <jolunoluo@tencent.com>
@@ -42,6 +42,7 @@ public class QueryParser {
         if (!parseSqlReq.getSql().isEmpty()) {
             return parser(parseSqlReq);
         }
+        metricReq.setNativeQuery(queryStructReq.getNativeQuery());
         return parser(metricReq);
     }
 
@@ -94,7 +95,7 @@ public class QueryParser {
     }
 
     public QueryStatement parser(MetricReq metricCommand) {
-        return parser(metricCommand, true);
+        return parser(metricCommand, !metricCommand.isNativeQuery());
    }

    public QueryStatement parser(MetricReq metricCommand, boolean isAgg) {
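The two QueryParser hunks work together: the struct request's nativeQuery flag is copied onto the MetricReq, and the single-argument parser overload now derives its aggregation flag from that flag instead of hard-coding true. A minimal, self-contained sketch of the new decision (MetricReqStub below is illustrative, not the real class):

// Minimal sketch of the new aggregation decision in QueryParser#parser(MetricReq):
// isAgg is no longer hard-coded to true but derived from the request's
// native-query flag. MetricReqStub is a stand-in for MetricReq.
public class AggFlagSketch {

    static class MetricReqStub {
        private boolean nativeQuery;
        boolean isNativeQuery() { return nativeQuery; }
        void setNativeQuery(boolean nativeQuery) { this.nativeQuery = nativeQuery; }
    }

    public static void main(String[] args) {
        MetricReqStub metricReq = new MetricReqStub();
        metricReq.setNativeQuery(true);              // flag copied from the struct request
        boolean isAgg = !metricReq.isNativeQuery();  // was: always true
        System.out.println("isAgg = " + isAgg);      // prints: isAgg = false
    }
}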
@@ -2,12 +2,6 @@ package com.tencent.supersonic.semantic.query.parser.calcite.sql.render;
 
 
 import com.tencent.supersonic.semantic.api.query.request.MetricReq;
-import com.tencent.supersonic.semantic.query.parser.calcite.sql.Renderer;
-import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.DataSourceNode;
-import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.DimensionNode;
-import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.FilterNode;
-import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.MetricNode;
-import com.tencent.supersonic.semantic.query.parser.calcite.sql.TableView;
 import com.tencent.supersonic.semantic.query.parser.calcite.dsl.Constants;
 import com.tencent.supersonic.semantic.query.parser.calcite.dsl.DataSource;
 import com.tencent.supersonic.semantic.query.parser.calcite.dsl.Dimension;
@@ -15,7 +9,13 @@ import com.tencent.supersonic.semantic.query.parser.calcite.dsl.Identify;
 import com.tencent.supersonic.semantic.query.parser.calcite.dsl.Measure;
 import com.tencent.supersonic.semantic.query.parser.calcite.dsl.Metric;
 import com.tencent.supersonic.semantic.query.parser.calcite.schema.SemanticSchema;
+import com.tencent.supersonic.semantic.query.parser.calcite.sql.Renderer;
+import com.tencent.supersonic.semantic.query.parser.calcite.sql.TableView;
+import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.DataSourceNode;
+import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.DimensionNode;
+import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.FilterNode;
 import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.IdentifyNode;
+import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.MetricNode;
 import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.SemanticNode;
 import java.util.ArrayList;
 import java.util.HashSet;
@@ -33,7 +33,7 @@ import org.springframework.util.CollectionUtils;
 @Slf4j
 public class SourceRender extends Renderer {
 
-    public static TableView renderOne(String alias, List<String> fieldWhere,
+    public static TableView renderOne(String alias, List<String> fieldWheres,
            List<String> reqMetrics, List<String> reqDimensions, String queryWhere, DataSource datasource,
            SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception {
 
@@ -41,11 +41,8 @@ public class SourceRender extends Renderer {
         TableView output = new TableView();
         List<String> queryMetrics = new ArrayList<>(reqMetrics);
         List<String> queryDimensions = new ArrayList<>(reqDimensions);
+        List<String> fieldWhere = new ArrayList<>(fieldWheres);
         if (!fieldWhere.isEmpty()) {
-            // SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
-            // if (addWhere) {
-            //     output.getFilter().add(sqlNode);
-            // }
             Set<String> dimensions = new HashSet<>();
             Set<String> metrics = new HashSet<>();
             whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema, dimensions, metrics);
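Besides renaming the parameter to fieldWheres, renderOne now works on its own copy (new ArrayList<>(fieldWheres)), so fields it adds or removes while building the table view no longer leak back into the caller's list. A minimal, self-contained sketch of that defensive-copy pattern (class, method, and field names below are illustrative):

import java.util.ArrayList;
import java.util.List;

// Sketch of the defensive-copy pattern introduced in renderOne: copy the
// incoming list before mutating it, so the caller's collection stays untouched.
public class DefensiveCopySketch {

    static List<String> render(List<String> fieldWheres) {
        List<String> fieldWhere = new ArrayList<>(fieldWheres); // local, mutable copy
        fieldWhere.add("extra_filter_field");                   // safe: only the copy changes
        return fieldWhere;
    }

    public static void main(String[] args) {
        List<String> caller = new ArrayList<>(List.of("user_id"));
        List<String> result = render(caller);
        System.out.println(caller); // [user_id]                     -- unchanged
        System.out.println(result); // [user_id, extra_filter_field]
    }
}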
@@ -69,7 +66,8 @@ public class SourceRender extends Renderer {
                 }
             }
         for (String dimension : queryDimensions) {
-            if(dimension.contains(Constants.DIMENSION_IDENTIFY) && queryDimensions.contains(dimension.split(Constants.DIMENSION_IDENTIFY)[1])){
+            if (dimension.contains(Constants.DIMENSION_IDENTIFY) && queryDimensions.contains(
+                    dimension.split(Constants.DIMENSION_IDENTIFY)[1])) {
                continue;
            }
            buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
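The reformatted condition skips a prefixed dimension whenever its bare name is also in the query. A small sketch of the contains/split logic, assuming Constants.DIMENSION_IDENTIFY is a plain separator string; the "__" used below is only an illustration, the real value is defined in Constants:

import java.util.List;

// Sketch of the skip condition in the dimension loop. DIMENSION_IDENTIFY is
// assumed to be a separator string; "__" is illustrative only.
public class DimensionIdentifySketch {

    static final String DIMENSION_IDENTIFY = "__";

    public static void main(String[] args) {
        List<String> queryDimensions = List.of("user__city", "city");
        for (String dimension : queryDimensions) {
            boolean skip = dimension.contains(DIMENSION_IDENTIFY)
                    && queryDimensions.contains(dimension.split(DIMENSION_IDENTIFY)[1]);
            System.out.println(dimension + " -> skip=" + skip); // user__city -> skip=true, city -> skip=false
        }
    }
}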
@@ -213,14 +211,15 @@ public class SourceRender extends Renderer {
                 continue;
             }
             String filterField = field;
-            if(field.contains(Constants.DIMENSION_IDENTIFY)) {
+            if (field.contains(Constants.DIMENSION_IDENTIFY)) {
                 filterField = field.split(Constants.DIMENSION_IDENTIFY)[1];
             }
-            addField(filterField,field,datasource,schema,dimensions,metrics);
+            addField(filterField, field, datasource, schema, dimensions, metrics);
         }
     }
 
-    private static void addField(String field,String oriField,DataSource datasource, SemanticSchema schema, Set<String> dimensions,
+    private static void addField(String field, String oriField, DataSource datasource, SemanticSchema schema,
+            Set<String> dimensions,
            Set<String> metrics) {
        Optional<Dimension> dimension = datasource.getDimensions().stream()
                .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
@@ -251,9 +250,11 @@ public class SourceRender extends Renderer {
         Optional<Metric> datasourceMetric = schema.getMetrics()
                 .stream().filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
         if (datasourceMetric.isPresent()) {
-            Set<String> measures = datasourceMetric.get().getMetricTypeParams().getMeasures().stream().map(m->m.getName()).collect(
-                    Collectors.toSet());
-            if(datasource.getMeasures().stream().map(m->m.getName()).collect(Collectors.toSet()).containsAll(measures)){
+            Set<String> measures = datasourceMetric.get().getMetricTypeParams().getMeasures().stream()
+                    .map(m -> m.getName()).collect(
+                            Collectors.toSet());
+            if (datasource.getMeasures().stream().map(m -> m.getName()).collect(Collectors.toSet())
+                    .containsAll(measures)) {
                 metrics.add(oriField);
                 return;
             }
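The reformatted check classifies a field as a metric of this datasource only when the datasource declares every measure the schema-level metric is built from. A self-contained sketch of that containsAll test (the Measure class below is a stand-in, not SuperSonic's model class):

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Sketch of the containsAll check from addField: the field belongs to this
// datasource as a metric only if the datasource covers all of its measures.
public class MeasureContainmentSketch {

    static class Measure {
        private final String name;
        Measure(String name) { this.name = name; }
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<Measure> metricMeasures = List.of(new Measure("pv"), new Measure("uv"));
        List<Measure> datasourceMeasures = List.of(new Measure("pv"), new Measure("uv"), new Measure("cost"));

        Set<String> measures = metricMeasures.stream().map(m -> m.getName()).collect(Collectors.toSet());
        boolean belongsToDatasource = datasourceMeasures.stream().map(m -> m.getName())
                .collect(Collectors.toSet()).containsAll(measures);
        System.out.println(belongsToDatasource); // true: pv and uv are both covered
    }
}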
@@ -197,9 +197,9 @@ public class CalculateAggConverter implements SemanticConverter {
     public String getOverSelect(QueryStructReq queryStructCmd, boolean isOver) {
         String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
             if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
-                return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s",
+                return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
                         f.getColumn(), f.getColumn(), f.getColumn(), f.getColumn(),
-                        f.getColumn());
+                        f.getFunc().getOperator(),f.getColumn());
             } else {
                 return f.getColumn();
             }
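With the extra _%s in the alias, a ratio aggregator's output column now carries the operator name, and the raw column is re-selected after the comma. A sketch of the fragment the new format string produces, assuming AggOperatorEnum#getOperator() returns a lowercase token such as "ratio_over" (both the token and the column name below are illustrative):

// Sketch of the select fragment built by the new format string for one
// ratio aggregator. "ratio_over" is an assumed example of what
// AggOperatorEnum#getOperator() returns; "pv" is an illustrative column.
public class RatioAliasSketch {
    public static void main(String[] args) {
        String column = "pv";
        String operator = "ratio_over";
        String fragment = String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
                column, column, column, column, operator, column);
        // prints: ( (pv-pv_roll)/cast(pv_roll as DOUBLE) ) as pv_ratio_over,pv
        System.out.println(fragment);
    }
}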
@@ -335,9 +335,9 @@ public class CalculateAggConverter implements SemanticConverter {
         String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
             if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                 return String.format(
-                        "if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s",
+                        "if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
                         f.getColumn(), f.getColumn(), f.getColumn(), f.getColumn(),
-                        f.getColumn());
+                        f.getColumn(),f.getFunc().getOperator(),f.getColumn());
             } else {
                 return f.getColumn();
             }
@@ -39,7 +39,7 @@ public class DateUtils {
     private String sysDateWeekCol;
 
     public Boolean recentMode(DateConf dateInfo) {
-        if (Objects.nonNull(dateInfo) && DateConf.DateMode.RECENT_UNITS == dateInfo.getDateMode()
+        if (Objects.nonNull(dateInfo) && DateConf.DateMode.RECENT == dateInfo.getDateMode()
                && DAY.equalsIgnoreCase(dateInfo.getPeriod()) && Objects.nonNull(dateInfo.getUnit())) {
            return true;
        }
@@ -47,7 +47,7 @@ public class DateUtils {
     }
 
     public boolean hasAvailableDataMode(DateConf dateInfo) {
-        if (Objects.nonNull(dateInfo) && DateConf.DateMode.AVAILABLE_TIME == dateInfo.getDateMode()) {
+        if (Objects.nonNull(dateInfo) && DateConf.DateMode.AVAILABLE == dateInfo.getDateMode()) {
            return true;
        }
        return false;
@@ -263,16 +263,16 @@ public class DateUtils {
     public String getDateWhereStr(DateConf dateInfo, ItemDateResp dateDate) {
         String dateStr = "";
         switch (dateInfo.getDateMode()) {
-            case BETWEEN_CONTINUOUS:
+            case BETWEEN:
                 dateStr = betweenDateStr(dateDate, dateInfo);
                 break;
-            case LIST_DISCRETE:
+            case LIST:
                 dateStr = listDateStr(dateDate, dateInfo);
                 break;
-            case RECENT_UNITS:
+            case RECENT:
                 dateStr = recentDateStr(dateDate, dateInfo);
                 break;
-            case AVAILABLE_TIME:
+            case AVAILABLE:
                 dateStr = hasDataModeStr(dateDate, dateInfo);
                 break;
            default:
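All of the DateUtils changes (and the QueryStructUtils hunk below) follow one rename of the DateConf.DateMode constants: BETWEEN_CONTINUOUS becomes BETWEEN, LIST_DISCRETE becomes LIST, RECENT_UNITS becomes RECENT, and AVAILABLE_TIME becomes AVAILABLE. A minimal sketch of dispatching on the renamed modes, using a stand-in enum rather than the real DateConf.DateMode:

// Sketch of switching over the renamed date modes. The enum below only lists
// the constants visible in this diff; the real type is DateConf.DateMode.
public class DateModeSketch {

    enum DateMode { BETWEEN, LIST, RECENT, AVAILABLE }

    static String describe(DateMode mode) {
        switch (mode) {
            case BETWEEN:   return "continuous range between a start and an end date";
            case LIST:      return "discrete list of dates";
            case RECENT:    return "most recent N units";
            case AVAILABLE: return "whatever dates currently have data";
            default:        return "unknown";
        }
    }

    public static void main(String[] args) {
        System.out.println(describe(DateMode.RECENT)); // most recent N units
    }
}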
@@ -101,10 +101,10 @@ public class QueryStructUtils {
         if (Objects.isNull(dateDate)
                 || Strings.isEmpty(dateDate.getStartDate())
                 && Strings.isEmpty(dateDate.getEndDate())) {
-            if (dateInfo.getDateMode().equals(DateMode.LIST_DISCRETE)) {
+            if (dateInfo.getDateMode().equals(DateMode.LIST)) {
                 return dateUtils.listDateStr(dateDate, dateInfo);
             }
-            if (dateInfo.getDateMode().equals(DateMode.BETWEEN_CONTINUOUS)) {
+            if (dateInfo.getDateMode().equals(DateMode.BETWEEN)) {
                return dateUtils.betweenDateStr(dateDate, dateInfo);
            }
            if (dateUtils.hasAvailableDataMode(dateInfo)) {