(improvement)(semantic) add dimension array dataType (#269)

Co-authored-by: jipengli <jipengli@tencent.com>
Author: jipeli
Date: 2023-10-23 10:23:00 +08:00
Committed by: GitHub
Parent: 8bd43f113b
Commit: cbf84876de
13 changed files with 289 additions and 33 deletions
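
In short: dimensions can now carry a dataType. The type is declared on DimensionYamlTpl (as a DataTypeEnums value), mapped into the new calcite-side DataType enum, and stored on Dimension. When a queried or filtered dimension is ARRAY-typed, SourceRender collects its expr into extendFields, DataSourceNode.buildExtend wraps the datasource in a Hive/Spark-style LATERAL VIEW explode (rendered by the new LateralViewExplodeNode), and DimensionNode.buildArray selects the exploded column under the _sgl suffix added to Constants. Illustrative sketches follow the affected files below.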

View File: QueryServiceImpl.java

@@ -50,6 +50,7 @@ import com.tencent.supersonic.common.util.jsqlparser.SqlParserRemoveHelper;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserReplaceHelper;
import com.tencent.supersonic.knowledge.dictionary.MapResult;
import com.tencent.supersonic.knowledge.service.SearchService;
import com.tencent.supersonic.knowledge.utils.HanlpHelper;
import com.tencent.supersonic.semantic.api.model.response.ExplainResp;
import com.tencent.supersonic.semantic.api.model.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.semantic.api.query.enums.FilterOperatorEnum;
@@ -622,6 +623,7 @@ public class QueryServiceImpl implements QueryService {
detectModelIds.add(dimensionValueReq.getModelId());
List<MapResult> mapResultList = SearchService.prefixSearch(dimensionValueReq.getValue().toString(),
2000, dimensionValueReq.getAgentId(), detectModelIds);
HanlpHelper.transLetterOriginal(mapResultList);
log.info("mapResultList:{}", mapResultList);
mapResultList = mapResultList.stream().filter(o -> {
for (String nature : o.getNatures()) {

View File: DimensionYamlTpl.java

@@ -1,6 +1,7 @@
package com.tencent.supersonic.semantic.api.model.yaml;
import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
import lombok.Data;
@@ -17,4 +18,5 @@ public class DimensionYamlTpl {
private DimensionTimeTypeParamsTpl typeParams;
private DataTypeEnums dataType;
}
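
A minimal sketch of declaring an array-typed dimension template, assuming DataTypeEnums exposes an ARRAY constant (only its getType() accessor appears in this diff); the dimension name and column are hypothetical:

import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
import com.tencent.supersonic.semantic.api.model.yaml.DimensionYamlTpl;

public class DimensionYamlTplSketch {
    public static void main(String[] args) {
        DimensionYamlTpl tpl = new DimensionYamlTpl();     // @Data supplies getters and setters
        tpl.setName("tags");                               // hypothetical dimension name
        tpl.setExpr("tag_list");                           // hypothetical source column
        tpl.setDataType(DataTypeEnums.ARRAY);              // assumption: ARRAY constant exists
        System.out.println(tpl.getDataType().getType());   // the value DataType.of(...) consumes downstream
    }
}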

View File: DatasourceYamlManager.java

@@ -15,6 +15,7 @@ import com.tencent.supersonic.semantic.model.domain.adaptor.engineadapter.Engine
import com.tencent.supersonic.semantic.model.domain.pojo.Datasource;
import com.tencent.supersonic.semantic.model.domain.pojo.DatasourceQueryEnum;
import com.tencent.supersonic.semantic.model.domain.utils.SysTimeDimensionBuilder;
import java.util.Objects;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
@@ -53,6 +54,9 @@ public class DatasourceYamlManager {
DimensionYamlTpl dimensionYamlTpl = new DimensionYamlTpl();
BeanUtils.copyProperties(dim, dimensionYamlTpl);
dimensionYamlTpl.setName(dim.getBizName());
if (Objects.isNull(dimensionYamlTpl.getExpr())) {
dimensionYamlTpl.setExpr(dim.getBizName());
}
if (dim.getTypeParams() != null) {
DimensionTimeTypeParamsTpl dimensionTimeTypeParamsTpl = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParamsTpl.setIsPrimary(dim.getTypeParams().getIsPrimary());

View File: MaterializationQuery.java

@@ -21,6 +21,8 @@ import com.tencent.supersonic.semantic.query.parser.calcite.SemanticSchemaManage
import com.tencent.supersonic.semantic.query.parser.calcite.planner.AggPlanner;
import com.tencent.supersonic.semantic.query.parser.calcite.planner.MaterializationPlanner;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DataSource;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DataType;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Dimension;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Materialization;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Materialization.TimePartType;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.MaterializationElement;
@@ -294,10 +296,33 @@ public class MaterializationQuery implements QueryOptimizer {
setMetricExpr(semanticModel.getRootPath(), m.getName(), m);
});
}
if (!CollectionUtils.isEmpty(dataSource.getDimensions())) {
dataSource.getDimensions().stream().forEach(d -> {
setDimension(semanticModel.getRootPath(), d.getName(), d);
});
}
}
return semanticModel;
}
protected void setDimension(String rootPath, String bizName, Dimension dimension) {
try {
dimension.setDataType(DataType.UNKNOWN);
SemanticModel oriSemanticModel = semanticSchemaManager.get(rootPath);
if (Objects.nonNull(oriSemanticModel)) {
for (List<Dimension> dimensions : oriSemanticModel.getDimensionMap().values()) {
Optional<Dimension> dim = dimensions.stream()
.filter(d -> d.getName().equalsIgnoreCase(bizName)).findFirst();
if (dim.isPresent()) {
dimension.setDataType(dim.get().getDataType());
}
}
}
} catch (Exception e) {
log.error("getMetricExpr {}", e);
}
}
protected void setMetricExpr(String rootPath, String bizName, Measure measure) {
try {
measure.setExpr(bizName);

View File: SemanticSchemaManager.java

@@ -13,6 +13,7 @@ import com.tencent.supersonic.semantic.api.model.yaml.MetricYamlTpl;
import com.tencent.supersonic.semantic.model.domain.Catalog;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Constants;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DataSource;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DataType;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Dimension;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DimensionTimeTypeParams;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Identify;
@@ -166,6 +167,12 @@ public class SemanticSchemaManager {
dimension.setExpr(dimensionYamlTpl.getExpr());
dimension.setName(dimensionYamlTpl.getName());
dimension.setOwners(dimensionYamlTpl.getOwners());
if (Objects.nonNull(dimensionYamlTpl.getDataType())) {
dimension.setDataType(DataType.of(dimensionYamlTpl.getDataType().getType()));
}
if (Objects.isNull(dimension.getDataType())) {
dimension.setDataType(DataType.UNKNOWN);
}
dimension.setDimensionTimeTypeParams(getDimensionTimeTypeParams(dimensionYamlTpl.getTypeParams()));
dimensions.add(dimension);
}

View File: Constants.java

@@ -11,6 +11,7 @@ public class Constants {
public static final String JOIN_TABLE_LEFT_PREFIX = "src12_";
public static final String DIMENSION_TYPE_TIME_GRANULARITY_NONE = "none";
public static final String DIMENSION_TYPE_TIME = "time";
public static final String DIMENSION_ARRAY_SINGLE_SUFFIX = "_sgl";
public static final String MATERIALIZATION_ZIPPER_START = "start_";
public static final String MATERIALIZATION_ZIPPER_END = "end_";

View File: DataType.java (new)

@@ -0,0 +1,55 @@
package com.tencent.supersonic.semantic.query.parser.calcite.s2ql;
import java.util.Arrays;
public enum DataType {
ARRAY("ARRAY"),
MAP("MAP"),
JSON("JSON"),
VARCHAR("VARCHAR"),
DATE("DATE"),
BIGINT("BIGINT"),
INT("INT"),
DOUBLE("DOUBLE"),
FLOAT("FLOAT"),
DECIMAL("DECIMAL"),
UNKNOWN("unknown");
private String type;
DataType(String type) {
this.type = type;
}
public String getType() {
return type;
}
public static DataType of(String type) {
for (DataType typeEnum : DataType.values()) {
if (typeEnum.getType().equalsIgnoreCase(type)) {
return typeEnum;
}
}
return DataType.UNKNOWN;
}
public boolean isObject() {
return Arrays.asList(ARRAY, MAP, JSON).contains(this);
}
public boolean isArray() {
return ARRAY.equals(this);
}
}
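
A quick, self-contained illustration of the enum's lookup and helper methods, grounded in the code above:

import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DataType;

public class DataTypeDemo {
    public static void main(String[] args) {
        System.out.println(DataType.of("array").isArray());   // true, lookup is case-insensitive
        System.out.println(DataType.of("MAP").isObject());    // true, ARRAY/MAP/JSON count as object types
        System.out.println(DataType.of("varchar").isArray()); // false
        System.out.println(DataType.of("text"));              // UNKNOWN, unmapped names fall back
    }
}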

View File: Dimension.java

@@ -16,6 +16,8 @@ public class Dimension implements SemanticItem {
private String expr;
private DimensionTimeTypeParams dimensionTimeTypeParams;
private DataType dataType = DataType.UNKNOWN;
@Override
public String getName() {
return name;

View File: DataSourceNode.java

@@ -7,8 +7,9 @@ import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Constants;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DataSource;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Dimension;
import com.tencent.supersonic.semantic.query.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.extend.LateralViewExplodeNode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
@@ -18,11 +19,16 @@ import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
@Slf4j
public class DataSourceNode extends SemanticNode {
@@ -40,6 +46,29 @@ public class DataSourceNode extends SemanticNode {
return buildAs(datasource.getName(), getTable(sqlTable, scope));
}
public static SqlNode buildExtend(DataSource datasource, Set<String> exprList,
SqlValidatorScope scope)
throws Exception {
if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope);
}
SqlNode view = new SqlBasicCall(new LateralViewExplodeNode(), Arrays.asList(build(datasource, scope),
new SqlNodeList(getExtendField(exprList, scope), SqlParserPos.ZERO)), SqlParserPos.ZERO);
return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, view);
}
public static List<SqlNode> getExtendField(Set<String> exprList, SqlValidatorScope scope) throws Exception {
List<SqlNode> sqlNodeList = new ArrayList<>();
for (String expr : exprList) {
sqlNodeList.add(parse(expr, scope));
sqlNodeList.add(new SqlDataTypeSpec(
new SqlUserDefinedTypeNameSpec(expr + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, SqlParserPos.ZERO),
SqlParserPos.ZERO));
}
return sqlNodeList;
}
private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope) throws Exception {
SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig());
SqlNode sqlNode = sqlParser.parseQuery();
@@ -52,7 +81,7 @@ public class DataSourceNode extends SemanticNode {
}
public static void getQueryDimensionMeasure(SemanticSchema schema, MetricReq metricCommand,
Set<String> queryDimension, List<String> measures) {
queryDimension.addAll(metricCommand.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] : d)
.collect(Collectors.toSet()));
@@ -64,8 +93,8 @@ public class DataSourceNode extends SemanticNode {
}
public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, MetricReq metricCommand,
Set<String> queryDimension, List<String> measures,
SqlValidatorScope scope) throws Exception {
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope), filterConditions);
@@ -87,7 +116,7 @@ public class DataSourceNode extends SemanticNode {
}
public static List<DataSource> getMatchDataSources(SqlValidatorScope scope, SemanticSchema schema,
MetricReq metricCommand) throws Exception {
List<DataSource> dataSources = new ArrayList<>();
// check by metric
@@ -151,11 +180,11 @@ public class DataSourceNode extends SemanticNode {
}
private static boolean checkMatch(Set<String> sourceMeasure,
Set<String> queryDimension,
List<String> measures,
Set<String> dimension,
MetricReq metricCommand,
SqlValidatorScope scope) throws Exception {
boolean isAllMatch = true;
sourceMeasure.retainAll(measures);
if (sourceMeasure.size() < measures.size()) {
@@ -180,10 +209,10 @@ public class DataSourceNode extends SemanticNode {
}
private static List<DataSource> getLinkDataSources(Set<String> baseIdentifiers,
Set<String> queryDimension,
List<String> measures,
DataSource baseDataSource,
SemanticSchema schema) {
Set<String> linkDataSourceName = new HashSet<>();
List<DataSource> linkDataSources = new ArrayList<>();
for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) {

View File: DimensionNode.java

@@ -1,8 +1,11 @@
package com.tencent.supersonic.semantic.query.parser.calcite.sql.node;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Constants;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Dimension;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -34,5 +37,30 @@ public class DimensionNode extends SemanticNode {
return buildAs(alias, sqlNode);
}
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope) throws Exception {
if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope);
if (isIdentifier(sqlNode)) {
return buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope));
}
throw new Exception("array dimension expr should only identify");
}
return build(dimension, scope);
}
public static List<SqlNode> expandArray(Dimension dimension, SqlValidatorScope scope)
throws Exception {
if (dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope);
if (isIdentifier(sqlNode)) {
return Arrays.asList(buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope)));
}
throw new Exception("array dimension expr should only identify");
}
return expand(dimension, scope);
}
}
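
A sketch of the value setup that drives the new array branch, assuming Dimension keeps a default constructor alongside the Lombok setters used elsewhere in this commit; the SqlValidatorScope a real buildArray call needs is omitted:

import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.DataType;
import com.tencent.supersonic.semantic.query.parser.calcite.s2ql.Dimension;

public class ArrayDimensionSketch {
    public static void main(String[] args) {
        Dimension d = new Dimension();      // assumption: default constructor is available
        d.setName("tags");                  // hypothetical dimension name
        d.setExpr("tag_list");              // must stay a plain identifier for the array path
        d.setDataType(DataType.ARRAY);
        System.out.println(d.getDataType().isArray());   // true, so buildArray takes the _sgl branch
        // DimensionNode.buildArray(d, scope) would then emit roughly "tag_list_sgl AS tags",
        // reading the exploded column that DataSourceNode.buildExtend exposes;
        // non-array dimensions fall through to the plain build(...) path.
    }
}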

View File: ExtendNode.java (new)

@@ -0,0 +1,35 @@
package com.tencent.supersonic.semantic.query.parser.calcite.sql.node;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlInternalOperator;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.SqlWriter.Frame;
import org.apache.calcite.sql.SqlWriter.FrameTypeEnum;
public class ExtendNode extends SqlInternalOperator {
public ExtendNode() {
super(SqlKind.EXTEND.lowerName, SqlKind.EXTEND);
}
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOperator operator = call.getOperator();
Frame frame = writer.startList(FrameTypeEnum.SIMPLE);
call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
writer.setNeedWhitespace(true);
writer.sep(operator.getName());
SqlNodeList list = (SqlNodeList) call.operand(1);
Frame frameArgs = writer.startList("(", ")");
for (int i = 0; i < list.size(); i++) {
list.get(i).unparse(writer, 0, 0);
if (i < list.size() - 1) {
writer.sep(",");
}
}
writer.endList(frameArgs);
writer.endList(frame);
}
}

View File: LateralViewExplodeNode.java (new)

@@ -0,0 +1,61 @@
package com.tencent.supersonic.semantic.query.parser.calcite.sql.node.extend;
import com.tencent.supersonic.semantic.query.parser.calcite.sql.node.ExtendNode;
import java.util.Iterator;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
public class LateralViewExplodeNode extends ExtendNode {
public final String sqlNameView = "view";
public final String sqlNameExplode = "explode";
public LateralViewExplodeNode() {
super();
}
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOperator operator = call.getOperator();
writer.setNeedWhitespace(true);
assert call.operandCount() == 2;
writer.sep(SqlKind.SELECT.lowerName);
writer.sep(SqlIdentifier.STAR.toString());
writer.sep("from");
SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.SIMPLE);
call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
writer.setNeedWhitespace(true);
writer.sep(SqlKind.LATERAL.lowerName);
writer.sep(sqlNameView);
SqlNodeList list = (SqlNodeList) call.operand(1);
// operand(1) alternates [expr, alias, expr, alias, ...] as built by getExtendField
for (Ord<SqlNode> node : Ord.zip(list)) {
if (node.i > 0 && node.i % 2 > 0) {
// odd slots carry the generated alias ("<expr>_sgl")
writer.sep(SqlKind.AS.lowerName);
node.e.unparse(writer, 0, 0);
continue;
}
if (node.i > 0 && node.i % 2 == 0) {
// every further expr opens a new "lateral view" clause
writer.sep(SqlKind.LATERAL.lowerName);
writer.sep(sqlNameView);
}
explode(writer, node.e);
}
writer.endList(frame);
}
public void explode(SqlWriter writer, SqlNode sqlNode) {
writer.sep(sqlNameExplode);
SqlWriter.Frame frame = writer.startList("(", ")");
sqlNode.unparse(writer, 0, 0);
writer.endList(frame);
writer.sep("tmp_sgl_" + sqlNode.toString());
}
}
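
For orientation, with a hypothetical datasource user_ds and a single array expr tag_list, the operator above unparses to roughly this Hive/Spark-style shape (whitespace added; tmp_sgl_ is the lateral view table alias, _sgl the exploded column alias):

select * from (<user_ds sub-query>) user_ds
    lateral view explode(tag_list) tmp_sgl_tag_list as tag_list_sgl

DataSourceNode.buildExtend then aliases the whole block as user_ds_sgl, so downstream nodes can reference tag_list_sgl.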

View File: SourceRender.java

@@ -42,6 +42,7 @@ public class SourceRender extends Renderer {
List<String> queryMetrics = new ArrayList<>(reqMetrics);
List<String> queryDimensions = new ArrayList<>(reqDimensions);
List<String> fieldWhere = new ArrayList<>(fieldWheres);
Set<String> extendFields = new HashSet<>();
if (!fieldWhere.isEmpty()) {
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
@@ -50,7 +51,9 @@ public class SourceRender extends Renderer {
queryMetrics = uniqList(queryMetrics);
queryDimensions.addAll(dimensions);
queryDimensions = uniqList(queryDimensions);
mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, datasource, scope, schema, nonAgg);
mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
datasource, scope,
schema, nonAgg);
}
for (String metric : queryMetrics) {
@@ -72,9 +75,11 @@ public class SourceRender extends Renderer {
}
buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
: dimension, datasource, schema, nonAgg, dataSet, output, scope);
: dimension, datasource, schema, nonAgg, extendFields, dataSet, output,
scope);
}
SqlNode tableNode = DataSourceNode.build(datasource, scope);
SqlNode tableNode = DataSourceNode.buildExtend(datasource, extendFields, scope);
dataSet.setTable(tableNode);
output.setTable(SemanticNode.buildAs(
Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName() + "_" + UUID.randomUUID().toString()
@@ -83,7 +88,8 @@ public class SourceRender extends Renderer {
}
private static void buildDimension(String alias, String dimension, DataSource datasource, SemanticSchema schema,
boolean nonAgg, TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
boolean nonAgg, Set<String> extendFields, TableView dataSet, TableView output, SqlValidatorScope scope)
throws Exception {
List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
@@ -127,12 +133,14 @@ public class SourceRender extends Renderer {
}
Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
if (dimensionOptional.isPresent()) {
dataSet.getMeasure().add(DimensionNode.buildArray(dimensionOptional.get(), scope));
if (dimensionOptional.get().getDataType().isArray()) {
extendFields.add(dimensionOptional.get().getExpr());
}
if (nonAgg) {
dataSet.getMeasure().add(DimensionNode.build(dimensionOptional.get(), scope));
output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope));
return;
}
dataSet.getMeasure().add(DimensionNode.build(dimensionOptional.get(), scope));
output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope));
}
}
@@ -146,7 +154,8 @@ public class SourceRender extends Renderer {
}
private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, DataSource datasource, SqlValidatorScope scope, SemanticSchema schema,
List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>();
@@ -164,11 +173,6 @@ public class SourceRender extends Renderer {
if (!dim.getName().equalsIgnoreCase(where)) {
continue;
}
if (nonAgg) {
whereNode.addAll(DimensionNode.expand(dim, scope));
isAdd = true;
continue;
}
whereNode.addAll(DimensionNode.expand(dim, scope));
isAdd = true;
}
@@ -183,11 +187,10 @@ public class SourceRender extends Renderer {
}
Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
if (dimensionOptional.isPresent()) {
if (nonAgg) {
whereNode.add(DimensionNode.build(dimensionOptional.get(), scope));
continue;
whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope));
if (dimensionOptional.get().getDataType().isArray()) {
extendFields.add(dimensionOptional.get().getExpr());
}
whereNode.add(DimensionNode.build(dimensionOptional.get(), scope));
}
}
return whereNode;
@@ -195,9 +198,11 @@ public class SourceRender extends Renderer {
private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
List<String> queryMetrics,
List<String> queryDimensions, DataSource datasource, SqlValidatorScope scope, SemanticSchema schema,
List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg) throws Exception {
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, datasource, scope, schema,
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, extendFields, datasource,
scope, schema,
nonAgg);
dataSet.getMeasure().addAll(whereNode);
//getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
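
Putting the SourceRender changes together for a hypothetical array dimension tags with expr tag_list: buildDimension and the where handling add tag_list to extendFields and select the dimension through DimensionNode.buildArray, while DataSourceNode.buildExtend replaces the plain table node, so the rendered datasource comes out roughly as:

select tag_list_sgl as tags, <other selected fields>
from (select * from (<datasource query>) <ds_name>
      lateral view explode(tag_list) tmp_sgl_tag_list as tag_list_sgl) <ds_name>_sgl

Non-array dimensions keep the previous build(...) path, and a datasource with an empty extendFields set still goes through the unchanged DataSourceNode.build.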