(improvement)(headless) Modify the headless module structure to api, core and server (#588)

Co-authored-by: jolunoluo
This commit is contained in:
LXW
2024-01-02 16:43:28 +08:00
committed by GitHub
parent af1c560cc4
commit e7f13572d7
352 changed files with 2296 additions and 2675 deletions

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.headless.core.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a controller method (or parameter) whose incoming HTTP request must carry
 * valid {@code appId}, {@code timestamp} and {@code signature} headers.
 * Enforcement is performed at runtime by {@code ApiHeaderCheckAspect}, which
 * intercepts methods annotated with this type.
 */
@Target({ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface ApiHeaderCheck {
}

View File

@@ -0,0 +1,14 @@
package com.tencent.supersonic.headless.core.annotation;
import java.lang.annotation.Target;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Documented;
/**
 * Marks a query method that executes user-supplied S2SQL and therefore requires
 * column/row data-permission checks. Enforced at runtime by
 * {@code S2SQLDataAspect}, which intercepts methods carrying this annotation.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface S2SQLDataPermission {
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.headless.core.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a structured-query method (or parameter) that requires data-permission
 * checks before execution. Read at runtime by the corresponding permission
 * aspect via its {@code @Pointcut} on this annotation.
 */
@Target({ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface StructDataPermission {
}

View File

@@ -0,0 +1,74 @@
package com.tencent.supersonic.headless.core.aspect;
import com.tencent.supersonic.common.pojo.Pair;
import com.tencent.supersonic.common.pojo.exception.InvalidArgumentException;
import com.tencent.supersonic.common.util.SignatureUtils;
import com.tencent.supersonic.headless.common.server.enums.AppStatus;
import com.tencent.supersonic.headless.common.server.response.AppDetailResp;
import com.tencent.supersonic.headless.server.service.AppService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
@Component
@Aspect
@Order(1)
@Slf4j
public class ApiHeaderCheckAspect {

    public static final String APPID = "appId";
    private static final String TIMESTAMP = "timestamp";
    private static final String SIGNATURE = "signature";

    @Autowired
    private AppService appService;

    /** Pointcut: any method annotated with {@code @ApiHeaderCheck}. */
    @Pointcut("@annotation(com.tencent.supersonic.headless.core.annotation.ApiHeaderCheck)")
    private void apiPermissionCheck() {
    }

    /**
     * Validates the request headers before letting the annotated method run.
     *
     * @param joinPoint the intercepted invocation
     * @return the annotated method's own return value
     * @throws Throwable whatever the annotated method throws, or an
     *         InvalidArgumentException when header validation fails
     */
    @Around("apiPermissionCheck()")
    public Object doAround(ProceedingJoinPoint joinPoint) throws Throwable {
        Object[] objects = joinPoint.getArgs();
        // NOTE(review): assumes the HttpServletRequest is always the second argument
        // of every @ApiHeaderCheck-annotated method — confirm against the controllers.
        HttpServletRequest request = (HttpServletRequest) objects[1];
        checkHeader(request);
        return joinPoint.proceed();
    }

    /**
     * Checks presence and validity of appId/timestamp/signature headers:
     * all three must be non-blank and numeric where applicable, the app must
     * exist and be ONLINE, and the signature must verify against the app secret.
     */
    private void checkHeader(HttpServletRequest request) {
        String timestampStr = request.getHeader(TIMESTAMP);
        String signature = request.getHeader(SIGNATURE);
        String appId = request.getHeader(APPID);
        if (StringUtils.isBlank(timestampStr)) {
            throw new InvalidArgumentException("header中timestamp不可为空");
        }
        if (StringUtils.isBlank(signature)) {
            throw new InvalidArgumentException("header中signature不可为空");
        }
        if (StringUtils.isBlank(appId)) {
            throw new InvalidArgumentException("header中appId不可为空");
        }
        // Parse up front so malformed values surface as InvalidArgumentException,
        // matching the other validation failures, instead of a raw NumberFormatException.
        long timestamp;
        try {
            timestamp = Long.parseLong(timestampStr);
        } catch (NumberFormatException e) {
            throw new InvalidArgumentException("header中timestamp必须为数字");
        }
        int appIdValue;
        try {
            appIdValue = Integer.parseInt(appId);
        } catch (NumberFormatException e) {
            throw new InvalidArgumentException("header中appId必须为数字");
        }
        AppDetailResp appDetailResp = appService.getApp(appIdValue);
        if (appDetailResp == null) {
            throw new InvalidArgumentException("该appId对应的应用不存在");
        }
        if (!AppStatus.ONLINE.equals(appDetailResp.getAppStatus())) {
            throw new InvalidArgumentException("该应用暂时为非在线状态");
        }
        Pair<Boolean, String> checkResult = SignatureUtils.isValidSignature(appId, appDetailResp.getAppSecret(),
                timestamp, signature);
        if (!checkResult.first) {
            throw new InvalidArgumentException(checkResult.second);
        }
    }
}

View File

@@ -0,0 +1,272 @@
package com.tencent.supersonic.headless.core.aspect;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authorization.pojo.AuthRes;
import com.tencent.supersonic.auth.api.authorization.pojo.AuthResGrp;
import com.tencent.supersonic.auth.api.authorization.pojo.DimensionFilter;
import com.tencent.supersonic.auth.api.authorization.request.QueryAuthResReq;
import com.tencent.supersonic.auth.api.authorization.response.AuthorizedResourceResp;
import com.tencent.supersonic.auth.api.authorization.service.AuthService;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.QueryAuthorization;
import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.common.pojo.enums.SensitiveLevelEnum;
import com.tencent.supersonic.common.pojo.exception.InvalidPermissionException;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.ModelService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
@Service
@Slf4j
public class AuthCheckBaseAspect {

    // Dates serialized with the project's day format; Jackson clones the
    // DateFormat per serialization, so sharing one ObjectMapper is safe here.
    private static final ObjectMapper MAPPER = new ObjectMapper().setDateFormat(
            new SimpleDateFormat(Constants.DAY_FORMAT));

    @Autowired
    private AuthService authService;

    @Autowired
    private DimensionService dimensionService;

    @Autowired
    private MetricService metricService;

    @Autowired
    private ModelService modelService;

    /**
     * Returns true when the user is an admin of every model in {@code modelIds}.
     */
    public boolean doModelAdmin(User user, List<Long> modelIds) {
        List<ModelResp> modelListAdmin = modelService.getModelListWithAuth(user, null, AuthType.ADMIN);
        if (CollectionUtils.isEmpty(modelListAdmin)) {
            return false;
        }
        Set<Long> modelAdmins = modelListAdmin.stream().map(ModelResp::getId).collect(Collectors.toSet());
        return modelAdmins.containsAll(modelIds);
    }

    /**
     * Verifies the user may see every model in {@code modelIds}; otherwise throws
     * an InvalidPermissionException naming the first model and its admins.
     */
    public void doModelVisible(User user, List<Long> modelIds) {
        boolean visible = true;
        List<ModelResp> modelListVisible = modelService.getModelListWithAuth(user, null, AuthType.VISIBLE);
        if (CollectionUtils.isEmpty(modelListVisible)) {
            visible = false;
        } else {
            Set<Long> modelVisibles = modelListVisible.stream().map(ModelResp::getId).collect(Collectors.toSet());
            if (!modelVisibles.containsAll(modelIds)) {
                visible = false;
            }
        }
        if (!visible) {
            ModelResp modelResp = modelService.getModel(modelIds.get(0));
            String modelName = modelResp.getName();
            List<String> admins = modelService.getModelAdmin(modelResp.getId());
            String message = String.format("您没有模型[%s]权限,请联系管理员%s开通", modelName, admins);
            throw new InvalidPermissionException(message);
        }
    }

    /**
     * Collects the bizNames of all HIGH-sensitivity dimensions and metrics
     * under the given models.
     */
    public Set<String> getHighSensitiveColsByModelId(List<Long> modelIds) {
        Set<String> highSensitiveCols = new HashSet<>();
        MetaFilter metaFilter = new MetaFilter();
        metaFilter.setModelIds(modelIds);
        metaFilter.setSensitiveLevel(SensitiveLevelEnum.HIGH.getCode());
        List<DimensionResp> highSensitiveDimensions = dimensionService.getDimensions(metaFilter);
        List<MetricResp> highSensitiveMetrics = metricService.getMetrics(metaFilter);
        if (!CollectionUtils.isEmpty(highSensitiveDimensions)) {
            highSensitiveDimensions.forEach(dim -> highSensitiveCols.add(dim.getBizName()));
        }
        if (!CollectionUtils.isEmpty(highSensitiveMetrics)) {
            highSensitiveMetrics.forEach(metric -> highSensitiveCols.add(metric.getBizName()));
        }
        return highSensitiveCols;
    }

    /**
     * Queries the auth service for the resources the user is authorized on,
     * scoped to the given models and sensitive resource names.
     * Note: each requested resource is attributed to modelIds.get(0).
     */
    public AuthorizedResourceResp getAuthorizedResource(User user, List<Long> modelIds,
                                                        Set<String> sensitiveResReq) {
        List<AuthRes> resourceReqList = new ArrayList<>();
        sensitiveResReq.forEach(res -> resourceReqList.add(new AuthRes(modelIds.get(0), res)));
        QueryAuthResReq queryAuthResReq = new QueryAuthResReq();
        queryAuthResReq.setResources(resourceReqList);
        queryAuthResReq.setModelIds(modelIds);
        AuthorizedResourceResp authorizedResource = fetchAuthRes(queryAuthResReq, user);
        log.info("user:{}, domainId:{}, after queryAuthorizedResources:{}", user.getName(), modelIds,
                authorizedResource);
        return authorizedResource;
    }

    private AuthorizedResourceResp fetchAuthRes(QueryAuthResReq queryAuthResReq, User user) {
        log.info("queryAuthResReq:{}", queryAuthResReq);
        return authService.queryAuthorizedResources(queryAuthResReq, user);
    }

    /**
     * Flattens the authorized resource groups into the set of resource names
     * belonging to the given models.
     */
    public Set<String> getAuthResNameSet(AuthorizedResourceResp authorizedResource, List<Long> modelIds) {
        Set<String> resAuthName = new HashSet<>();
        List<AuthResGrp> authResGrpList = authorizedResource.getResources();
        authResGrpList.forEach(authResGrp -> {
            List<AuthRes> cols = authResGrp.getGroup();
            if (!CollectionUtils.isEmpty(cols)) {
                cols.stream().filter(col -> modelIds.contains(col.getModelId()))
                        .forEach(col -> resAuthName.add(col.getName()));
            }
        });
        log.info("resAuthName:{}", resAuthName);
        return resAuthName;
    }

    /** True when the user is authorized on every requested sensitive resource. */
    public boolean allSensitiveResReqIsOk(Set<String> sensitiveResReq, Set<String> resAuthSet) {
        if (resAuthSet.containsAll(sensitiveResReq)) {
            return true;
        }
        log.info("sensitiveResReq:{}, resAuthSet:{}", sensitiveResReq, resAuthSet);
        return false;
    }

    /** Attaches row-filter prompt info to the result (no columns need masking). */
    public QueryResultWithSchemaResp getQueryResultWithColumns(QueryResultWithSchemaResp resultWithColumns,
                                                               List<Long> modelIds,
                                                               AuthorizedResourceResp authResource) {
        addPromptInfoInfo(modelIds, resultWithColumns, authResource, Sets.newHashSet());
        return resultWithColumns;
    }

    /**
     * Masks values of columns in {@code need2Apply} with "******" and flags those
     * columns as unauthorized. Works on a deep copy so the raw result is untouched;
     * if the deep copy fails, the raw result is mutated as a fallback.
     */
    public QueryResultWithSchemaResp desensitizationData(QueryResultWithSchemaResp raw, Set<String> need2Apply) {
        log.debug("start desensitizationData logic");
        if (CollectionUtils.isEmpty(need2Apply)) {
            log.info("user has all sensitiveRes");
            return raw;
        }
        List<QueryColumn> columns = raw.getColumns();
        boolean doDesensitization = false;
        // One matching column is enough; bail out of both loops on first hit
        // (the original scanned every remaining column needlessly).
        outer:
        for (QueryColumn queryColumn : columns) {
            for (String sensitiveCol : need2Apply) {
                if (queryColumn.getNameEn().contains(sensitiveCol)) {
                    doDesensitization = true;
                    break outer;
                }
            }
        }
        if (!doDesensitization) {
            return raw;
        }
        QueryResultWithSchemaResp queryResultWithColumns = raw;
        try {
            queryResultWithColumns = deepCopyResult(raw);
        } catch (Exception e) {
            log.warn("deepCopyResult: ", e);
        }
        addAuthorizedSchemaInfo(queryResultWithColumns.getColumns(), need2Apply);
        desensitizationInternal(queryResultWithColumns.getResultList(), need2Apply);
        return queryResultWithColumns;
    }

    /** Marks columns whose nameEn is in {@code need2Apply} as not authorized. */
    private void addAuthorizedSchemaInfo(List<QueryColumn> columns, Set<String> need2Apply) {
        if (CollectionUtils.isEmpty(need2Apply)) {
            return;
        }
        columns.forEach(col -> {
            if (need2Apply.contains(col.getNameEn())) {
                col.setAuthorized(false);
            }
        });
    }

    /** Replaces every sensitive cell value with the mask "******", row by row. */
    private void desensitizationInternal(List<Map<String, Object>> result, Set<String> need2Apply) {
        log.info("start desensitizationInternal logic");
        for (int i = 0; i < result.size(); i++) {
            Map<String, Object> row = result.get(i);
            Map<String, Object> newRow = new HashMap<>();
            for (String col : row.keySet()) {
                boolean sensitive = false;
                for (String sensitiveCol : need2Apply) {
                    if (col.contains(sensitiveCol)) {
                        sensitive = true;
                        break;
                    }
                }
                if (sensitive) {
                    newRow.put(col, "******");
                } else {
                    newRow.put(col, row.get(col));
                }
            }
            result.set(i, newRow);
        }
    }

    /**
     * Deep-copies a query result: columns via JSON round-trip, rows via new maps.
     */
    private QueryResultWithSchemaResp deepCopyResult(QueryResultWithSchemaResp raw) throws Exception {
        QueryResultWithSchemaResp queryResultWithColumns = new QueryResultWithSchemaResp();
        BeanUtils.copyProperties(raw, queryResultWithColumns);
        List<QueryColumn> columns = new ArrayList<>();
        if (!CollectionUtils.isEmpty(raw.getColumns())) {
            String columnsStr = MAPPER.writeValueAsString(raw.getColumns());
            columns = MAPPER.readValue(columnsStr, new TypeReference<List<QueryColumn>>() {
            });
        }
        // Single assignment (the original set columns twice — once inside the
        // if block and once after it, redundantly).
        queryResultWithColumns.setColumns(columns);
        List<Map<String, Object>> resultData = new ArrayList<>();
        if (!CollectionUtils.isEmpty(raw.getResultList())) {
            for (Map<String, Object> line : raw.getResultList()) {
                Map<String, Object> newLine = new HashMap<>();
                newLine.putAll(line);
                resultData.add(newLine);
            }
        }
        queryResultWithColumns.setResultList(resultData);
        return queryResultWithColumns;
    }

    /**
     * Attaches authorization prompt info to the result: a desensitization notice
     * when columns were masked, and/or a row-permission notice when dimension
     * filters were applied.
     */
    public void addPromptInfoInfo(List<Long> modelIds, QueryResultWithSchemaResp queryResultWithColumns,
                                  AuthorizedResourceResp authorizedResource, Set<String> need2Apply) {
        List<DimensionFilter> filters = authorizedResource.getFilters();
        if (CollectionUtils.isEmpty(need2Apply) && CollectionUtils.isEmpty(filters)) {
            return;
        }
        List<String> admins = modelService.getModelAdmin(modelIds.get(0));
        if (!CollectionUtils.isEmpty(need2Apply)) {
            String promptInfo = String.format("当前结果已经过脱敏处理, 申请权限请联系管理员%s", admins);
            queryResultWithColumns.setQueryAuthorization(new QueryAuthorization(promptInfo));
        }
        if (!CollectionUtils.isEmpty(filters)) {
            log.debug("dimensionFilters:{}", filters);
            ModelResp modelResp = modelService.getModel(modelIds.get(0));
            List<String> exprList = new ArrayList<>();
            List<String> descList = new ArrayList<>();
            filters.forEach(filter -> {
                descList.add(filter.getDescription());
                exprList.add(filter.getExpressions().toString());
            });
            String promptInfo = "当前结果已经过行权限过滤,详细过滤条件如下:%s, 申请权限请联系管理员%s";
            String message = String.format(promptInfo, CollectionUtils.isEmpty(descList) ? exprList : descList, admins);
            queryResultWithColumns.setQueryAuthorization(
                    new QueryAuthorization(modelResp.getName(), exprList, descList, message));
            log.info("queryResultWithColumns:{}", queryResultWithColumns);
        }
    }
}

View File

@@ -0,0 +1,312 @@
package com.tencent.supersonic.headless.core.aspect;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.common.util.jsqlparser.FieldExpression;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserReplaceHelper;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserSelectHelper;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.pojo.DimValueMap;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.DimensionService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.util.Strings;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@Aspect
@Component
@Slf4j
public class DimValueAspect {

    @Value("${dimension.value.map.enable:true}")
    private Boolean dimensionValueMapEnable;

    @Value("${dimension.value.map.sql.enable:true}")
    private Boolean dimensionValueMapSqlEnable;

    @Autowired
    private DimensionService dimensionService;

    /**
     * Rewrites dimension-value aliases in an S2SQL query's WHERE clause to their
     * technical names before execution, then maps technical values in the result
     * back to business names.
     */
    @Around("execution(* com.tencent.supersonic.headless.core.service.impl.QueryServiceImpl.queryBySql(..))")
    public Object handleSqlDimValue(ProceedingJoinPoint joinPoint) throws Throwable {
        if (!dimensionValueMapSqlEnable) {
            log.debug("sql dimensionValueMapEnable is false, skip dimensionValueMap");
            QueryResultWithSchemaResp queryResultWithColumns = (QueryResultWithSchemaResp) joinPoint.proceed();
            return queryResultWithColumns;
        }
        Object[] args = joinPoint.getArgs();
        QueryS2SQLReq queryS2SQLReq = (QueryS2SQLReq) args[0];
        MetaFilter metaFilter = new MetaFilter(Lists.newArrayList(queryS2SQLReq.getModelIds()));
        String sql = queryS2SQLReq.getSql();
        log.info("correctorSql before replacing:{}", sql);
        // if dimensionvalue is alias, consider the true dimensionvalue.
        List<FieldExpression> fieldExpressionList = SqlParserSelectHelper.getWhereExpressions(sql);
        List<DimensionResp> dimensions = dimensionService.getDimensions(metaFilter);
        Set<String> fieldNames = dimensions.stream().map(DimensionResp::getName).collect(Collectors.toSet());
        Map<String, Map<String, String>> filedNameToValueMap = new HashMap<>();
        for (FieldExpression expression : fieldExpressionList) {
            if (!fieldNames.contains(expression.getFieldName())) {
                continue;
            }
            for (DimensionResp dimension : dimensions) {
                if (!expression.getFieldName().equals(dimension.getName())
                        || CollectionUtils.isEmpty(dimension.getDimValueMaps())) {
                    continue;
                }
                // consider '=' filter
                if (expression.getOperator().equals(FilterOperatorEnum.EQUALS.getValue())) {
                    for (DimValueMap dimValue : dimension.getDimValueMaps()) {
                        if (!CollectionUtils.isEmpty(dimValue.getAlias())
                                && dimValue.getAlias().contains(expression.getFieldValue().toString())) {
                            getFiledNameToValueMap(filedNameToValueMap, expression.getFieldValue().toString(),
                                    dimValue.getTechName(), expression.getFieldName());
                        }
                    }
                }
                // consider 'in' filter, each element needs to judge.
                replaceInCondition(expression, dimension, filedNameToValueMap);
            }
        }
        log.info("filedNameToValueMap:{}", filedNameToValueMap);
        sql = SqlParserReplaceHelper.replaceValue(sql, filedNameToValueMap);
        log.info("correctorSql after replacing:{}", sql);
        queryS2SQLReq.setSql(sql);
        Map<String, Map<String, String>> techNameToBizName = getTechNameToBizName(dimensions);
        QueryResultWithSchemaResp queryResultWithColumns = (QueryResultWithSchemaResp) joinPoint.proceed();
        if (Objects.nonNull(queryResultWithColumns)) {
            rewriteDimValue(queryResultWithColumns, techNameToBizName);
        }
        return queryResultWithColumns;
    }

    /**
     * For an IN condition, replaces every element that matches a dimension-value
     * alias with its technical name; elements without an alias are kept as-is.
     */
    public void replaceInCondition(FieldExpression expression, DimensionResp dimension,
                                   Map<String, Map<String, String>> filedNameToValueMap) {
        if (expression.getOperator().equals(FilterOperatorEnum.IN.getValue())) {
            String fieldValue = JsonUtil.toString(expression.getFieldValue());
            fieldValue = fieldValue.replace("'", "");
            List<String> values = JsonUtil.toList(fieldValue, String.class);
            List<String> revisedValues = new ArrayList<>();
            for (int i = 0; i < values.size(); i++) {
                // primitive boolean instead of the deprecated `new Boolean(false)`
                boolean matched = false;
                for (DimValueMap dimValueMap : dimension.getDimValueMaps()) {
                    if (!CollectionUtils.isEmpty(dimValueMap.getAlias())
                            && dimValueMap.getAlias().contains(values.get(i))) {
                        matched = true;
                        revisedValues.add(dimValueMap.getTechName());
                        break;
                    }
                }
                if (!matched) {
                    revisedValues.add(values.get(i));
                }
            }
            if (!revisedValues.equals(values)) {
                getFiledNameToValueMap(filedNameToValueMap, JsonUtil.toString(values),
                        JsonUtil.toString(revisedValues), expression.getFieldName());
            }
        }
    }

    /**
     * Records an old-value -> new-value replacement for a field. Merges into any
     * existing map for the field instead of overwriting it, so multiple
     * replacements for the same field are all retained (the original `put` of a
     * fresh map dropped earlier entries).
     */
    public void getFiledNameToValueMap(Map<String, Map<String, String>> filedNameToValueMap,
                                       String oldValue, String newValue, String fieldName) {
        filedNameToValueMap.computeIfAbsent(fieldName, key -> new HashMap<>()).put(oldValue, newValue);
    }

    /**
     * Structured-query counterpart of {@link #handleSqlDimValue}: rewrites alias
     * values in dimension filters to technical names, then maps technical values
     * in the result back to business names.
     */
    @Around("execution(* com.tencent.supersonic.headless.core.rest.QueryController.queryByStruct(..))"
            + " || execution(* com.tencent.supersonic.headless.core.service.QueryService.queryByStruct(..))"
            + " || execution(* com.tencent.supersonic.headless.core.service.QueryService.queryByStructWithAuth(..))")
    public Object handleDimValue(ProceedingJoinPoint joinPoint) throws Throwable {
        if (!dimensionValueMapEnable) {
            log.debug("dimensionValueMapEnable is false, skip dimensionValueMap");
            QueryResultWithSchemaResp queryResultWithColumns = (QueryResultWithSchemaResp) joinPoint.proceed();
            return queryResultWithColumns;
        }
        Object[] args = joinPoint.getArgs();
        QueryStructReq queryStructReq = (QueryStructReq) args[0];
        MetaFilter metaFilter = new MetaFilter(Lists.newArrayList(queryStructReq.getModelIds()));
        List<DimensionResp> dimensions = dimensionService.getDimensions(metaFilter);
        Map<String, Map<String, String>> dimAndAliasAndTechNamePair = getAliasAndBizNameToTechName(dimensions);
        Map<String, Map<String, String>> dimAndTechNameAndBizNamePair = getTechNameToBizName(dimensions);
        rewriteFilter(queryStructReq.getDimensionFilters(), dimAndAliasAndTechNamePair);
        QueryResultWithSchemaResp queryResultWithColumns = (QueryResultWithSchemaResp) joinPoint.proceed();
        if (Objects.nonNull(queryResultWithColumns)) {
            rewriteDimValue(queryResultWithColumns, dimAndTechNameAndBizNamePair);
        }
        return queryResultWithColumns;
    }

    /**
     * Replaces technical dimension values in result rows with their business
     * names, when any result column has a tech->biz mapping.
     */
    private void rewriteDimValue(QueryResultWithSchemaResp queryResultWithColumns,
                                 Map<String, Map<String, String>> dimAndTechNameAndBizNamePair) {
        if (!selectDimValueMap(queryResultWithColumns.getColumns(), dimAndTechNameAndBizNamePair)) {
            return;
        }
        log.debug("start rewriteDimValue for resultList");
        for (Map<String, Object> line : queryResultWithColumns.getResultList()) {
            for (String bizName : line.keySet()) {
                if (dimAndTechNameAndBizNamePair.containsKey(bizName) && Objects.nonNull(line.get(bizName))) {
                    String techName = line.get(bizName).toString();
                    Map<String, String> techAndBizPair = dimAndTechNameAndBizNamePair.get(bizName);
                    if (!CollectionUtils.isEmpty(techAndBizPair) && techAndBizPair.containsKey(techName)) {
                        String bizValueName = techAndBizPair.get(techName);
                        if (Strings.isNotEmpty(bizValueName)) {
                            line.put(bizName, bizValueName);
                        }
                    }
                }
            }
        }
    }

    /**
     * True when at least one result column has a tech->biz value mapping.
     * (The original checked the mapping for emptiness twice and never checked
     * {@code columns}, so an empty/null column list slipped through.)
     */
    private boolean selectDimValueMap(List<QueryColumn> columns, Map<String,
            Map<String, String>> dimAndTechNameAndBizNamePair) {
        if (CollectionUtils.isEmpty(columns)
                || CollectionUtils.isEmpty(dimAndTechNameAndBizNamePair)) {
            return false;
        }
        for (QueryColumn queryColumn : columns) {
            if (dimAndTechNameAndBizNamePair.containsKey(queryColumn.getNameEn())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Recursively rewrites alias values in all filters to technical names.
     * The original returned from the loop after the first leaf filter, leaving
     * every subsequent filter untouched; this version processes all of them.
     */
    private void rewriteFilter(List<Filter> dimensionFilters, Map<String, Map<String, String>> aliasAndTechNamePair) {
        for (Filter filter : dimensionFilters) {
            if (Objects.isNull(filter)) {
                continue;
            }
            if (!CollectionUtils.isEmpty(filter.getChildren())) {
                rewriteFilter(filter.getChildren(), aliasAndTechNamePair);
                continue;
            }
            Object value = filter.getValue();
            String bizName = filter.getBizName();
            if (!aliasAndTechNamePair.containsKey(bizName) || Objects.isNull(value)) {
                continue;
            }
            Map<String, String> aliasPair = aliasAndTechNamePair.get(bizName);
            if (value instanceof List) {
                List<String> values = (List) value;
                List<String> valuesNew = new ArrayList<>();
                for (String valueSingle : values) {
                    valuesNew.add(aliasPair.getOrDefault(valueSingle, valueSingle));
                }
                filter.setValue(valuesNew);
            } else if (value instanceof String) {
                if (aliasPair.containsKey(value)) {
                    filter.setValue(aliasPair.get(value));
                }
            }
        }
    }

    /**
     * Builds bizName -> (alias or dim-value bizName -> techName) maps for all
     * dimensions that define value mappings.
     */
    private Map<String, Map<String, String>> getAliasAndBizNameToTechName(List<DimensionResp> dimensions) {
        if (CollectionUtils.isEmpty(dimensions)) {
            return new HashMap<>();
        }
        Map<String, Map<String, String>> result = new HashMap<>();
        for (DimensionResp dimension : dimensions) {
            if (needSkipDimension(dimension)) {
                continue;
            }
            String bizName = dimension.getBizName();
            List<DimValueMap> dimValueMaps = dimension.getDimValueMaps();
            Map<String, String> aliasAndBizNameToTechName = new HashMap<>();
            for (DimValueMap dimValueMap : dimValueMaps) {
                if (needSkipDimValue(dimValueMap)) {
                    continue;
                }
                if (Strings.isNotEmpty(dimValueMap.getBizName())) {
                    aliasAndBizNameToTechName.put(dimValueMap.getBizName(), dimValueMap.getTechName());
                }
                if (!CollectionUtils.isEmpty(dimValueMap.getAlias())) {
                    dimValueMap.getAlias().forEach(alias -> {
                        if (Strings.isNotEmpty(alias)) {
                            aliasAndBizNameToTechName.put(alias, dimValueMap.getTechName());
                        }
                    });
                }
            }
            if (!CollectionUtils.isEmpty(aliasAndBizNameToTechName)) {
                result.put(bizName, aliasAndBizNameToTechName);
            }
        }
        return result;
    }

    /** A dim-value mapping is unusable without a technical name. */
    private boolean needSkipDimValue(DimValueMap dimValueMap) {
        return Objects.isNull(dimValueMap) || Strings.isEmpty(dimValueMap.getTechName());
    }

    /**
     * Builds bizName -> (techName -> dim-value bizName) maps for all dimensions
     * that define value mappings.
     */
    private Map<String, Map<String, String>> getTechNameToBizName(List<DimensionResp> dimensions) {
        if (CollectionUtils.isEmpty(dimensions)) {
            return new HashMap<>();
        }
        Map<String, Map<String, String>> result = new HashMap<>();
        for (DimensionResp dimension : dimensions) {
            if (needSkipDimension(dimension)) {
                continue;
            }
            String bizName = dimension.getBizName();
            List<DimValueMap> dimValueMaps = dimension.getDimValueMaps();
            Map<String, String> techNameToBizName = new HashMap<>();
            for (DimValueMap dimValueMap : dimValueMaps) {
                if (needSkipDimValue(dimValueMap)) {
                    continue;
                }
                if (StringUtils.isNotEmpty(dimValueMap.getBizName())) {
                    techNameToBizName.put(dimValueMap.getTechName(), dimValueMap.getBizName());
                }
            }
            if (!CollectionUtils.isEmpty(techNameToBizName)) {
                result.put(bizName, techNameToBizName);
            }
        }
        return result;
    }

    /** A dimension is skipped when it has no bizName or no value mappings. */
    private boolean needSkipDimension(DimensionResp dimension) {
        return Objects.isNull(dimension) || Strings.isEmpty(dimension.getBizName()) || CollectionUtils.isEmpty(
                dimension.getDimValueMaps());
    }
}

View File

@@ -0,0 +1,187 @@
package com.tencent.supersonic.headless.core.aspect;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authorization.response.AuthorizedResourceResp;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.exception.InvalidPermissionException;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserAddHelper;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.utils.QueryStructUtils;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.pojo.ModelFilter;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.ModelService;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import org.apache.commons.lang3.StringUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.MINUS;
@Component
@Aspect
@Order(1)
@Slf4j
public class S2SQLDataAspect extends AuthCheckBaseAspect {

    @Autowired
    private QueryStructUtils queryStructUtils;

    @Autowired
    private DimensionService dimensionService;

    @Autowired
    private ModelService modelService;

    @Value("${permission.data.enable:true}")
    private Boolean permissionDataEnable;

    /** Pointcut: any method annotated with {@code @S2SQLDataPermission}. */
    @Pointcut("@annotation(com.tencent.supersonic.headless.core.annotation.S2SQLDataPermission)")
    private void s2SQLPermissionCheck() {
    }

    /**
     * Enforces data permissions around an S2SQL query: admins bypass checks;
     * otherwise visibility is verified, row-permission filters are injected into
     * the SQL, and unauthorized sensitive columns are desensitized in the result.
     *
     * @throws Throwable from the wrapped query, or a permission exception
     */
    @Around("s2SQLPermissionCheck()")
    public Object doAround(ProceedingJoinPoint joinPoint) throws Throwable {
        log.info("s2SQL permission check!");
        Object[] objects = joinPoint.getArgs();
        QueryS2SQLReq queryS2SQLReq = (QueryS2SQLReq) objects[0];
        User user = (User) objects[1];
        if (!permissionDataEnable) {
            log.info("not to check s2SQL permission!");
            return joinPoint.proceed();
        }
        if (Objects.isNull(user) || Strings.isNullOrEmpty(user.getName())) {
            throw new RuntimeException("please provide user information");
        }
        List<Long> modelIds = queryS2SQLReq.getModelIds();
        // 1. determine whether admin of the model
        if (doModelAdmin(user, modelIds)) {
            log.info("determine whether admin of the model!");
            return joinPoint.proceed();
        }
        // 2. determine whether the subject field is visible
        doModelVisible(user, modelIds);
        // 3. fetch data permission meta information
        Set<String> res4Privilege = queryStructUtils.getResNameEnExceptInternalCol(queryS2SQLReq, user);
        log.info("modelId:{}, res4Privilege:{}", modelIds, res4Privilege);
        Set<String> sensitiveResByModel = getHighSensitiveColsByModelId(modelIds);
        Set<String> sensitiveResReq = res4Privilege.parallelStream()
                .filter(sensitiveResByModel::contains).collect(Collectors.toSet());
        log.info("this query domainId:{}, sensitiveResReq:{}", modelIds, sensitiveResReq);
        // query user privilege info
        AuthorizedResourceResp authorizedResource = getAuthorizedResource(user, modelIds, sensitiveResReq);
        // get sensitiveRes that user has privilege
        Set<String> resAuthSet = getAuthResNameSet(authorizedResource, modelIds);
        // 4. if sensitive fields without permission are involved in filter, throw an exception
        doFilterCheckLogic(queryS2SQLReq, resAuthSet, sensitiveResReq);
        // 5. row permission pre-filter
        doRowPermission(queryS2SQLReq, authorizedResource);
        // 6. proceed
        QueryResultWithSchemaResp queryResultWithColumns = (QueryResultWithSchemaResp) joinPoint.proceed();
        if (CollectionUtils.isEmpty(sensitiveResReq) || allSensitiveResReqIsOk(sensitiveResReq, resAuthSet)) {
            // if sensitiveRes is empty
            log.info("sensitiveResReq is empty");
            return getQueryResultWithColumns(queryResultWithColumns, modelIds, authorizedResource);
        }
        // 7. if the column has no permission, mask it with *
        Set<String> need2Apply = sensitiveResReq.stream().filter(req -> !resAuthSet.contains(req))
                .collect(Collectors.toSet());
        log.info("need2Apply:{},sensitiveResReq:{},resAuthSet:{}", need2Apply, sensitiveResReq, resAuthSet);
        QueryResultWithSchemaResp queryResultAfterDesensitization =
                desensitizationData(queryResultWithColumns, need2Apply);
        addPromptInfoInfo(modelIds, queryResultAfterDesensitization, authorizedResource, need2Apply);
        return queryResultAfterDesensitization;
    }

    /**
     * ORs together the user's row-permission filter expressions and ANDs the
     * combined condition into the request SQL's WHERE clause.
     */
    private void doRowPermission(QueryS2SQLReq queryS2SQLReq, AuthorizedResourceResp authorizedResource) {
        log.debug("start doRowPermission logic");
        StringJoiner joiner = new StringJoiner(" OR ");
        List<String> dimensionFilters = new ArrayList<>();
        if (!CollectionUtils.isEmpty(authorizedResource.getFilters())) {
            authorizedResource.getFilters()
                    .forEach(filter -> dimensionFilters.addAll(filter.getExpressions()));
        }
        if (CollectionUtils.isEmpty(dimensionFilters)) {
            log.debug("dimensionFilters is empty");
            return;
        }
        dimensionFilters.forEach(filter -> {
            if (StringUtils.isNotEmpty(filter) && StringUtils.isNotEmpty(filter.trim())) {
                joiner.add(" ( " + filter + " ) ");
            }
        });
        String combined = joiner.toString();
        // Check BEFORE parsing: the original parsed first, so all-blank filters
        // produced " (  ) " and a guaranteed JSQLParserException.
        if (StringUtils.isEmpty(combined)) {
            return;
        }
        try {
            Expression expression = CCJSqlParserUtil.parseCondExpression(" ( " + combined + " ) ");
            String sql = SqlParserAddHelper.addWhere(queryS2SQLReq.getSql(), expression);
            log.info("before doRowPermission, queryS2SQLReq:{}", queryS2SQLReq.getSql());
            queryS2SQLReq.setSql(sql);
            log.info("after doRowPermission, queryS2SQLReq:{}", queryS2SQLReq.getSql());
        } catch (JSQLParserException jsqlParserException) {
            log.info("jsqlParser has an exception:{}", jsqlParserException.toString());
        }
    }

    /**
     * Throws an InvalidPermissionException when the query filters on sensitive
     * columns the user is not authorized for, naming each offending dimension.
     */
    private void doFilterCheckLogic(QueryS2SQLReq queryS2SQLReq, Set<String> resAuthName,
                                    Set<String> sensitiveResReq) {
        Set<String> resFilterSet = queryStructUtils.getFilterResNameEnExceptInternalCol(queryS2SQLReq);
        Set<String> need2Apply = resFilterSet.stream()
                .filter(res -> !resAuthName.contains(res) && sensitiveResReq.contains(res)).collect(Collectors.toSet());
        Set<String> nameCnSet = new HashSet<>();
        List<Long> modelIds = Lists.newArrayList(queryS2SQLReq.getModelIds());
        ModelFilter modelFilter = new ModelFilter();
        modelFilter.setModelIds(modelIds);
        List<ModelResp> modelInfos = modelService.getModelList(modelFilter);
        String modelNameCn = Constants.EMPTY;
        if (!CollectionUtils.isEmpty(modelInfos)) {
            modelNameCn = modelInfos.get(0).getName();
        }
        MetaFilter metaFilter = new MetaFilter(modelIds);
        List<DimensionResp> dimensionDescList = dimensionService.getDimensions(metaFilter);
        String finalDomainNameCn = modelNameCn;
        dimensionDescList.stream().filter(dim -> need2Apply.contains(dim.getBizName()))
                .forEach(dim -> nameCnSet.add(finalDomainNameCn + MINUS + dim.getName()));
        if (!CollectionUtils.isEmpty(need2Apply)) {
            // Guard against an empty model list before dereferencing get(0)
            // (the original would have thrown IndexOutOfBoundsException here).
            List<String> admins = CollectionUtils.isEmpty(modelInfos)
                    ? new ArrayList<>() : modelService.getModelAdmin(modelInfos.get(0).getId());
            log.info("in doFilterLogic, need2Apply:{}", need2Apply);
            String message = String.format("您没有以下维度%s权限, 请联系管理员%s开通", nameCnSet, admins);
            throw new InvalidPermissionException(message);
        }
    }
}

View File

@@ -0,0 +1,183 @@
package com.tencent.supersonic.headless.core.aspect;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authorization.response.AuthorizedResourceResp;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.pojo.exception.InvalidPermissionException;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.utils.QueryStructUtils;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.ModelService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.MINUS;
@Component
@Aspect
@Slf4j
public class StructDataAspect extends AuthCheckBaseAspect {

    @Autowired
    private QueryStructUtils queryStructUtils;
    @Autowired
    private DimensionService dimensionService;
    @Autowired
    private ModelService modelService;
    // Global switch: when false, struct queries bypass all data-permission checks.
    @Value("${permission.data.enable:true}")
    private Boolean permissionDataEnable;

    @Pointcut("@annotation(com.tencent.supersonic.headless.core.annotation.StructDataPermission)")
    public void dataPermissionAOP() {
    }

    /**
     * Enforces data permissions around struct-query execution: model-admin
     * short-circuit, model visibility check, sensitive-column filter validation,
     * row-level filter injection, and result desensitization.
     *
     * @param point join point whose args are expected to be (QueryStructReq, User)
     * @return the (possibly desensitized) query result
     * @throws Throwable whatever the wrapped query method throws
     */
    @Around(value = "dataPermissionAOP()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        Object[] args = point.getArgs();
        QueryStructReq queryStructReq = (QueryStructReq) args[0];
        User user = (User) args[1];
        if (!permissionDataEnable) {
            log.info("permissionDataEnable is false");
            return point.proceed();
        }
        if (Objects.isNull(user) || Strings.isNullOrEmpty(user.getName())) {
            // Message fixed: previously read "lease provide user information".
            throw new RuntimeException("Please provide user information");
        }
        //1. determine whether admin of the model
        if (doModelAdmin(user, queryStructReq.getModelIds())) {
            return point.proceed();
        }
        // 2. determine whether the subject field is visible
        doModelVisible(user, queryStructReq.getModelIds());
        // 3. fetch data permission meta information
        List<Long> modelIds = queryStructReq.getModelIds();
        Set<String> res4Privilege = queryStructUtils.getResNameEnExceptInternalCol(queryStructReq);
        log.info("modelId:{}, res4Privilege:{}", modelIds, res4Privilege);
        Set<String> sensitiveResByModel = getHighSensitiveColsByModelId(modelIds);
        Set<String> sensitiveResReq = res4Privilege.parallelStream()
                .filter(sensitiveResByModel::contains).collect(Collectors.toSet());
        log.info("this query domainId:{}, sensitiveResReq:{}", modelIds, sensitiveResReq);
        // query user privilege info
        AuthorizedResourceResp authorizedResource = getAuthorizedResource(user,
                modelIds, sensitiveResReq);
        // get sensitiveRes that user has privilege
        Set<String> resAuthSet = getAuthResNameSet(authorizedResource,
                queryStructReq.getModelIds());
        // 4.if sensitive fields without permission are involved in filter, thrown an exception
        doFilterCheckLogic(queryStructReq, resAuthSet, sensitiveResReq);
        // 5.row permission pre-filter
        doRowPermission(queryStructReq, authorizedResource);
        // 6.proceed
        QueryResultWithSchemaResp queryResultWithColumns = (QueryResultWithSchemaResp) point.proceed();
        if (CollectionUtils.isEmpty(sensitiveResReq) || allSensitiveResReqIsOk(sensitiveResReq, resAuthSet)) {
            // no sensitive columns requested, or the user holds all of them
            log.info("sensitiveResReq is empty");
            return getQueryResultWithColumns(queryResultWithColumns, modelIds, authorizedResource);
        }
        // 7.if the column has no permission, hit *  (was mis-numbered as a second "6.")
        Set<String> need2Apply = sensitiveResReq.stream().filter(req -> !resAuthSet.contains(req))
                .collect(Collectors.toSet());
        QueryResultWithSchemaResp queryResultAfterDesensitization =
                desensitizationData(queryResultWithColumns, need2Apply);
        addPromptInfoInfo(modelIds, queryResultAfterDesensitization, authorizedResource, need2Apply);
        return queryResultAfterDesensitization;
    }

    /**
     * Returns true when the user's authorized resource set covers every
     * requested sensitive resource.
     */
    public boolean allSensitiveResReqIsOk(Set<String> sensitiveResReq, Set<String> resAuthSet) {
        if (resAuthSet.containsAll(sensitiveResReq)) {
            return true;
        }
        log.info("sensitiveResReq:{}, resAuthSet:{}", sensitiveResReq, resAuthSet);
        return false;
    }

    /**
     * Injects the user's row-level permission expressions into the struct
     * request as an OR-joined SQL_PART dimension filter. No-op when the user
     * has no row filters or all expressions are blank.
     */
    private void doRowPermission(QueryStructReq queryStructReq, AuthorizedResourceResp authorizedResource) {
        log.debug("start doRowPermission logic");
        StringJoiner joiner = new StringJoiner(" OR ");
        List<String> dimensionFilters = new ArrayList<>();
        if (!CollectionUtils.isEmpty(authorizedResource.getFilters())) {
            authorizedResource.getFilters().stream()
                    .forEach(filter -> dimensionFilters.addAll(filter.getExpressions()));
        }
        if (CollectionUtils.isEmpty(dimensionFilters)) {
            log.debug("dimensionFilters is empty");
            return;
        }
        dimensionFilters.stream().forEach(filter -> {
            if (StringUtils.isNotEmpty(filter) && StringUtils.isNotEmpty(filter.trim())) {
                joiner.add(" ( " + filter + " ) ");
            }
        });
        if (StringUtils.isNotEmpty(joiner.toString())) {
            log.info("before doRowPermission, queryStructReq:{}", queryStructReq);
            Filter filter = new Filter("", FilterOperatorEnum.SQL_PART, joiner.toString());
            List<Filter> filters = Objects.isNull(queryStructReq.getOriginalFilter()) ? new ArrayList<>()
                    : queryStructReq.getOriginalFilter();
            filters.add(filter);
            queryStructReq.setDimensionFilters(filters);
            log.info("after doRowPermission, queryStructReq:{}", queryStructReq);
        }
    }

    /**
     * Rejects the query when its filters reference sensitive dimensions the user
     * is not authorized for, raising InvalidPermissionException with the
     * offending dimension names and model admins.
     */
    private void doFilterCheckLogic(QueryStructReq queryStructReq, Set<String> resAuthName,
                                    Set<String> sensitiveResReq) {
        Set<String> resFilterSet = queryStructUtils.getFilterResNameEnExceptInternalCol(queryStructReq);
        Set<String> need2Apply = resFilterSet.stream()
                .filter(res -> !resAuthName.contains(res) && sensitiveResReq.contains(res)).collect(Collectors.toSet());
        Set<String> nameCnSet = new HashSet<>();
        Map<Long, ModelResp> modelRespMap = modelService.getModelMap();
        List<Long> modelIds = Lists.newArrayList(queryStructReq.getModelIds());
        List<DimensionResp> dimensionDescList = dimensionService.getDimensions(new MetaFilter(modelIds));
        // NOTE(review): assumes every dimension's modelId is present in modelRespMap;
        // a missing entry would NPE on getName() — confirm upstream guarantees this.
        dimensionDescList.stream().filter(dim -> need2Apply.contains(dim.getBizName()))
                .forEach(dim -> nameCnSet.add(modelRespMap.get(dim.getModelId()).getName() + MINUS + dim.getName()));
        if (!CollectionUtils.isEmpty(need2Apply)) {
            List<String> admins = modelService.getModelAdmin(modelIds.get(0));
            log.info("in doFilterLogic, need2Apply:{}", need2Apply);
            String message = String.format("您没有以下维度%s权限, 请联系管理员%s开通", nameCnSet, admins);
            throw new InvalidPermissionException(message);
        }
    }
}

View File

@@ -0,0 +1,42 @@
package com.tencent.supersonic.headless.core.executor;
import com.tencent.supersonic.headless.common.server.response.DatabaseResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.service.Catalog;
import com.tencent.supersonic.headless.server.utils.SqlUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.logging.log4j.util.Strings;
import org.springframework.stereotype.Component;
@Component("JdbcExecutor")
@Slf4j
public class JdbcExecutor implements QueryExecutor {

    private final SqlUtils sqlUtils;

    public JdbcExecutor(SqlUtils sqlUtils) {
        this.sqlUtils = sqlUtils;
    }

    /** Fallback executor: accepts every statement. */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        return true;
    }

    /**
     * Runs the statement's SQL against the database resolved from its source id
     * and returns the result with its column schema. Returns null (with a
     * warning) when the statement carries no source id.
     */
    @Override
    public QueryResultWithSchemaResp execute(Catalog catalog, QueryStatement queryStatement) {
        String sourceId = queryStatement.getSourceId();
        if (Strings.isEmpty(sourceId)) {
            log.warn("data base id is empty");
            return null;
        }
        log.info("query SQL: {}", queryStatement.getSql());
        DatabaseResp database = catalog.getDatabase(Long.parseLong(sourceId));
        log.info("database info:{}", database);
        QueryResultWithSchemaResp result = new QueryResultWithSchemaResp();
        this.sqlUtils.init(database).queryInternal(queryStatement.getSql(), result);
        return result;
    }
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.headless.core.executor;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.service.Catalog;
/**
 * Pluggable executor for physical query statements. Implementations declare
 * which statements they can run via {@link #accept} and perform the actual
 * execution via {@link #execute}.
 */
public interface QueryExecutor {

    /** Returns true when this executor is able to run the given statement. */
    boolean accept(QueryStatement queryStatement);

    /** Executes the statement and returns the result with its column schema. */
    QueryResultWithSchemaResp execute(Catalog catalog, QueryStatement queryStatement);
}

View File

@@ -0,0 +1,39 @@
package com.tencent.supersonic.headless.core.optimizer;
import com.google.common.base.Strings;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import java.util.Objects;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Slf4j
@Component("DetailQuery")
public class DetailQuery implements QueryOptimizer {

    /**
     * For detail (non-aggregated) queries with no metrics, wraps the generated
     * SQL so only the grouped columns are projected. Leaves the SQL untouched
     * for all other query shapes.
     *
     * @param queryStructCmd the struct request describing the query
     * @param queryStatement the statement whose SQL is rewritten in place
     * @throws RuntimeException if the statement's SQL is empty or null
     */
    @Override
    public void rewrite(QueryStructReq queryStructCmd, QueryStatement queryStatement) {
        String sqlRaw = queryStatement.getSql().trim();
        if (Strings.isNullOrEmpty(sqlRaw)) {
            throw new RuntimeException("sql is empty or null");
        }
        log.debug("before handleNoMetric, sql:{}", sqlRaw);
        if (isDetailQuery(queryStructCmd)) {
            // Use the null-safe check: isDetailQuery() is true when metrics is null
            // OR empty, so calling getMetrics().size() here could NPE.
            if (CollectionUtils.isEmpty(queryStructCmd.getMetrics())
                    && !CollectionUtils.isEmpty(queryStructCmd.getGroups())) {
                String sqlForm = "select %s from ( %s ) src_no_metric";
                String sql = String.format(sqlForm, queryStructCmd.getGroups().stream().collect(
                        Collectors.joining(",")), sqlRaw);
                queryStatement.setSql(sql);
            }
        }
        log.debug("after handleNoMetric, sql:{}", queryStatement.getSql());
    }

    /**
     * Returns true for native (non-aggregated) queries that request no metrics,
     * i.e. pure detail queries.
     */
    public boolean isDetailQuery(QueryStructReq queryStructCmd) {
        return Objects.nonNull(queryStructCmd) && queryStructCmd.getQueryType().isNativeAggQuery()
                && CollectionUtils.isEmpty(queryStructCmd.getMetrics());
    }
}

View File

@@ -0,0 +1,8 @@
package com.tencent.supersonic.headless.core.optimizer;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
/**
 * Hook for rewriting a physical query statement after parsing, e.g. to adapt
 * the generated SQL to the shape of the original struct request.
 */
public interface QueryOptimizer {

    /** Rewrites the statement's SQL in place based on the struct request. */
    void rewrite(QueryStructReq queryStructCmd, QueryStatement queryStatement);
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.headless.core.parser;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.service.Catalog;
/**
 * Converts a logical query statement into parser-ready form. Converters are
 * consulted in order; each one that {@link #accept}s the statement gets to
 * mutate it via {@link #converter}.
 */
public interface HeadlessConverter {

    /** Returns true when this converter applies to the given statement. */
    boolean accept(QueryStatement queryStatement);

    /** Mutates the statement in place using metadata from the catalog. */
    void converter(Catalog catalog, QueryStatement queryStatement) throws Exception;
}

View File

@@ -0,0 +1,136 @@
package com.tencent.supersonic.headless.core.parser;
import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.common.core.pojo.MetricTable;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Component
@Slf4j
@Primary
public class QueryParser {
private final Catalog catalog;
public QueryParser(Catalog catalog) {
this.catalog = catalog;
}
public QueryStatement logicSql(QueryStatement queryStatement) throws Exception {
QueryStructReq queryStructReq = queryStatement.getQueryStructReq();
if (Objects.isNull(queryStatement.getParseSqlReq())) {
queryStatement.setParseSqlReq(new ParseSqlReq());
}
if (Objects.isNull(queryStatement.getMetricReq())) {
queryStatement.setMetricReq(new MetricReq());
}
log.info("SemanticConverter before [{}]", queryStructReq);
for (HeadlessConverter headlessConverter : ComponentFactory.getSemanticConverters()) {
if (headlessConverter.accept(queryStatement)) {
log.info("SemanticConverter accept [{}]", headlessConverter.getClass().getName());
headlessConverter.converter(catalog, queryStatement);
}
}
log.info("SemanticConverter after {} {} {}", queryStructReq, queryStatement.getParseSqlReq(),
queryStatement.getMetricReq());
if (!queryStatement.getParseSqlReq().getSql().isEmpty()) {
return parser(queryStatement.getParseSqlReq(), queryStatement);
}
queryStatement.getMetricReq().setNativeQuery(queryStructReq.getQueryType().isNativeAggQuery());
return parser(queryStatement);
}
public QueryStatement parser(ParseSqlReq sqlCommend, QueryStatement queryStatement) {
log.info("parser MetricReq [{}] ", sqlCommend);
try {
if (!CollectionUtils.isEmpty(sqlCommend.getTables())) {
List<String[]> tables = new ArrayList<>();
String sourceId = "";
for (MetricTable metricTable : sqlCommend.getTables()) {
MetricReq metricReq = new MetricReq();
metricReq.setMetrics(metricTable.getMetrics());
metricReq.setDimensions(metricTable.getDimensions());
metricReq.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere()));
metricReq.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption()));
metricReq.setRootPath(sqlCommend.getRootPath());
QueryStatement tableSql = new QueryStatement();
tableSql.setIsS2SQL(false);
tableSql.setMetricReq(metricReq);
tableSql.setMinMaxTime(queryStatement.getMinMaxTime());
tableSql.setEnableOptimize(queryStatement.getEnableOptimize());
tableSql = parser(tableSql, metricTable.getAggOption());
if (!tableSql.isOk()) {
queryStatement.setErrMsg(String.format("parser table [%s] error [%s]", metricTable.getAlias(),
tableSql.getErrMsg()));
return queryStatement;
}
tables.add(new String[]{metricTable.getAlias(), tableSql.getSql()});
sourceId = tableSql.getSourceId();
}
if (!tables.isEmpty()) {
String sql = "";
if (sqlCommend.isSupportWith()) {
sql = "with " + String.join(",",
tables.stream().map(t -> String.format("%s as (%s)", t[0], t[1])).collect(
Collectors.toList())) + "\n" + sqlCommend.getSql();
} else {
sql = sqlCommend.getSql();
for (String[] tb : tables) {
sql = StringUtils.replace(sql, tb[0],
"(" + tb[1] + ") " + (sqlCommend.isWithAlias() ? "" : tb[0]), -1);
}
}
queryStatement.setSql(sql);
queryStatement.setSourceId(sourceId);
queryStatement.setParseSqlReq(sqlCommend);
return queryStatement;
}
}
} catch (Exception e) {
log.error("physicalSql error {}", e);
queryStatement.setErrMsg(e.getMessage());
}
return queryStatement;
}
public QueryStatement parser(QueryStatement queryStatement) {
return parser(queryStatement, AggOption.getAggregation(queryStatement.getMetricReq().isNativeQuery()));
}
public QueryStatement parser(QueryStatement queryStatement, AggOption isAgg) {
MetricReq metricCommand = queryStatement.getMetricReq();
log.info("parser MetricReq [{}] isAgg [{}]", metricCommand, isAgg);
if (metricCommand.getRootPath().isEmpty()) {
queryStatement.setErrMsg("rootPath empty");
return queryStatement;
}
try {
queryStatement = ComponentFactory.getSqlParser().explain(queryStatement, isAgg, catalog);
return queryStatement;
} catch (Exception e) {
queryStatement.setErrMsg(e.getMessage());
log.error("parser error MetricCommand[{}] error [{}]", metricCommand, e);
}
return queryStatement;
}
}

View File

@@ -0,0 +1,10 @@
package com.tencent.supersonic.headless.core.parser;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.service.Catalog;
/**
 * Parses a metric request carried by a {@link QueryStatement} into physical SQL.
 */
public interface SqlParser {

    /**
     * Populates the statement with SQL (or an error message) for the given
     * aggregation option, using metadata from the catalog.
     */
    QueryStatement explain(QueryStatement queryStatement, AggOption aggOption, Catalog catalog) throws Exception;
}

View File

@@ -0,0 +1,51 @@
package com.tencent.supersonic.headless.core.parser.calcite;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.SqlParser;
import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.schema.RuntimeOptions;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.service.Catalog;
import org.springframework.stereotype.Component;
@Component("CalciteSqlParser")
public class CalciteSqlParser implements SqlParser {

    private final HeadlessSchemaManager headlessSchemaManager;

    public CalciteSqlParser(HeadlessSchemaManager headlessSchemaManager) {
        this.headlessSchemaManager = headlessSchemaManager;
    }

    /**
     * Resolves the HeadlessModel for the request's root path, builds a Calcite
     * schema from it, and lets the AggPlanner generate SQL and source id.
     * Sets an error message on the statement when the model cannot be found.
     */
    @Override
    public QueryStatement explain(QueryStatement queryStatement, AggOption isAgg, Catalog catalog) throws Exception {
        MetricReq metricReq = queryStatement.getMetricReq();
        HeadlessModel model = headlessSchemaManager.get(metricReq.getRootPath());
        if (model == null) {
            queryStatement.setErrMsg("semanticSchema not found");
            return queryStatement;
        }
        queryStatement.setMetricReq(metricReq);
        AggPlanner planner = new AggPlanner(buildSchema(model, queryStatement));
        planner.explain(queryStatement, isAgg);
        queryStatement.setSql(planner.getSql());
        queryStatement.setSourceId(planner.getSourceId());
        return queryStatement;
    }

    /** Assembles a HeadlessSchema from the model plus per-statement runtime options. */
    private HeadlessSchema buildSchema(HeadlessModel model, QueryStatement queryStatement) {
        HeadlessSchema schema = HeadlessSchema.newBuilder(model.getRootPath()).build();
        schema.setDatasource(model.getDatasourceMap());
        schema.setDimension(model.getDimensionMap());
        schema.setMetric(model.getMetrics());
        schema.setJoinRelations(model.getJoinRelations());
        RuntimeOptions options = RuntimeOptions.builder()
                .minMaxTime(queryStatement.getMinMaxTime())
                .enableOptimize(queryStatement.getEnableOptimize())
                .build();
        schema.setRuntimeOptions(options);
        return schema;
    }
}

View File

@@ -0,0 +1,114 @@
package com.tencent.supersonic.headless.core.parser.calcite;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSqlTypeFactoryImpl;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.parser.calcite.schema.ViewExpanderImpl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.avatica.util.Quoting;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.config.Lex;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.prepare.Prepare;
import org.apache.calcite.prepare.Prepare.CatalogReader;
import org.apache.calcite.rel.hint.HintStrategyTable;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.impl.SqlParserImpl;
import org.apache.calcite.sql.util.ChainedSqlOperatorTable;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
/**
 * Central Calcite configuration: shared type factory, connection properties,
 * and factory methods for parser config, validators, and SQL-to-rel converters.
 */
public class Configuration {

    // Shared connection properties; populated in the static initializer below.
    public static Properties configProperties = new Properties();
    public static RelDataTypeFactory typeFactory = new HeadlessSqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    public static SqlOperatorTable operatorTable = SqlStdOperatorTable.instance();
    public static CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
    // Lenient operator lookup tolerates dialect-specific functions during validation.
    public static SqlValidator.Config validatorConfig = SqlValidator.Config.DEFAULT
            .withConformance(SemanticSqlDialect.DEFAULT.getConformance())
            .withDefaultNullCollation(config.defaultNullCollation())
            .withLenientOperatorLookup(true);

    static {
        configProperties.put(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString());
        configProperties.put(CalciteConnectionProperty.UNQUOTED_CASING.camelName(), Casing.UNCHANGED.toString());
        configProperties.put(CalciteConnectionProperty.QUOTED_CASING.camelName(), Casing.TO_LOWER.toString());
    }

    /**
     * Builds the SqlParser configuration.
     *
     * NOTE(review): several chained setters below override values set just
     * before them — setCaseSensitive(true via config) then setCaseSensitive(false),
     * setQuoting(BACK_TICK) then setQuoting(SINGLE_QUOTE), and setLex(BIG_QUERY)
     * twice. Only the last call of each wins; confirm whether the earlier calls
     * are intentional or leftovers.
     */
    public static SqlParser.Config getParserConfig() {
        CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
        SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder();
        parserConfig.setCaseSensitive(config.caseSensitive());
        parserConfig.setUnquotedCasing(config.unquotedCasing());
        parserConfig.setQuotedCasing(config.quotedCasing());
        parserConfig.setConformance(config.conformance());
        parserConfig.setLex(Lex.BIG_QUERY);
        parserConfig.setParserFactory(SqlParserImpl.FACTORY).setCaseSensitive(false)
                .setIdentifierMaxLength(Integer.MAX_VALUE)
                .setQuoting(Quoting.BACK_TICK)
                .setQuoting(Quoting.SINGLE_QUOTE)
                .setQuotedCasing(Casing.TO_UPPER)
                .setUnquotedCasing(Casing.TO_UPPER)
                .setConformance(SemanticSqlDialect.DEFAULT.getConformance())
                .setLex(Lex.BIG_QUERY);
        return parserConfig.build();
    }

    /**
     * Creates a SqlValidator rooted at the given schema, using the standard
     * operator table and the shared validator config.
     */
    public static SqlValidator getSqlValidator(CalciteSchema rootSchema) {
        List<SqlOperatorTable> tables = new ArrayList<>();
        tables.add(SqlStdOperatorTable.instance());
        SqlOperatorTable operatorTable = new ChainedSqlOperatorTable(tables);
        //operatorTable.
        Prepare.CatalogReader catalogReader = new CalciteCatalogReader(
                rootSchema,
                Collections.singletonList(rootSchema.getName()),
                typeFactory,
                config
        );
        SqlValidator validator = SqlValidatorUtil.newValidator(operatorTable, catalogReader, typeFactory,
                validatorConfig);
        return validator;
    }

    /** Converter config: trims unused fields and expands sub-queries. */
    public static SqlToRelConverter.Config getConverterConfig() {
        HintStrategyTable strategies = HintStrategyTable.builder().build();
        return SqlToRelConverter.config()
                .withHintStrategyTable(strategies)
                .withTrimUnusedFields(true)
                .withExpand(true);
    }

    /**
     * Creates a SqlToRelConverter bound to the validator's catalog reader and a
     * fresh RelOptCluster built on the given planner.
     */
    public static SqlToRelConverter getSqlToRelConverter(SqlValidatorScope scope, SqlValidator sqlValidator,
                                                         RelOptPlanner relOptPlanner) {
        RexBuilder rexBuilder = new RexBuilder(typeFactory);
        RelOptCluster cluster = RelOptCluster.create(relOptPlanner, rexBuilder);
        FrameworkConfig fromworkConfig = Frameworks.newConfigBuilder()
                .parserConfig(getParserConfig())
                .defaultSchema(scope.getValidator().getCatalogReader().getRootSchema().plus())
                .build();
        return new SqlToRelConverter(new ViewExpanderImpl(),
                sqlValidator,
                (CatalogReader) scope.getValidator().getCatalogReader(), cluster, fromworkConfig.getConvertletTable(),
                getConverterConfig());
    }
}

View File

@@ -0,0 +1,336 @@
package com.tencent.supersonic.headless.core.parser.calcite;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization.TimePartType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.MetricTypeParams;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@Slf4j
@Service
public class HeadlessSchemaManager {
@Autowired
private LoadingCache<String, HeadlessModel> loadingCache;
private final Catalog catalog;
public HeadlessSchemaManager(Catalog catalog) {
this.catalog = catalog;
}
    /**
     * Rebuilds the HeadlessModel for a comma-separated list of model ids
     * ("rootPath"): loads the yaml templates from the catalog and assembles
     * join relations, datasources, dimensions, and metrics. Returns an empty
     * model when the path yields no model ids.
     *
     * @param rootPath comma-separated model ids, e.g. "1,2,3"
     * @return the assembled model (never null; may be empty)
     */
    public HeadlessModel reload(String rootPath) {
        HeadlessModel headlessModel = new HeadlessModel();
        headlessModel.setRootPath(rootPath);
        Set<Long> modelIds = Arrays.stream(rootPath.split(",")).map(s -> Long.parseLong(s.trim()))
                .collect(Collectors.toSet());
        if (modelIds.isEmpty()) {
            log.error("get modelIds empty {}", rootPath);
            return headlessModel;
        }
        // Output containers filled in place by the catalog call below.
        Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
        List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>();
        List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
        Map<Long, String> modelIdName = new HashMap<>();
        catalog.getModelYamlTplByModelIds(modelIds, dimensionYamlTpls, dataModelYamlTpls, metricYamlTpls, modelIdName);
        List<ModelRela> modelRelas = catalog.getModelRela(new ArrayList<>(modelIds));
        if (!CollectionUtils.isEmpty(modelRelas)) {
            headlessModel.setJoinRelations(getJoinRelation(modelRelas, modelIdName));
        }
        if (!dataModelYamlTpls.isEmpty()) {
            // On duplicate datasource names the first one wins ((k1, k2) -> k1).
            Map<String, DataSource> dataSourceMap = dataModelYamlTpls.stream().map(d -> getDatasource(d))
                    .collect(Collectors.toMap(DataSource::getName, item -> item, (k1, k2) -> k1));
            headlessModel.setDatasourceMap(dataSourceMap);
        }
        if (!dimensionYamlTpls.isEmpty()) {
            Map<String, List<Dimension>> dimensionMap = new HashMap<>();
            for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
                dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
            }
            headlessModel.setDimensionMap(dimensionMap);
        }
        if (!metricYamlTpls.isEmpty()) {
            headlessModel.setMetrics(getMetrics(metricYamlTpls));
        }
        return headlessModel;
    }
//private Map<String, SemanticSchema> semanticSchemaMap = new HashMap<>();
public HeadlessModel get(String rootPath) throws Exception {
rootPath = formatKey(rootPath);
HeadlessModel schema = loadingCache.get(rootPath);
if (schema == null) {
return null;
}
return schema;
}
    /** Converts metric yaml templates into Metric objects (delegates to the builder below). */
    public static List<Metric> getMetrics(final List<MetricYamlTpl> t) {
        return getMetricsByMetricYamlTpl(t);
    }
    /** Converts dimension yaml templates into Dimension objects (delegates to the builder below). */
    public static List<Dimension> getDimensions(final List<DimensionYamlTpl> t) {
        return getDimension(t);
    }
    /**
     * Builds a DataSource from its yaml template, deriving the aggregation time
     * granularity from its time dimension and mapping the model source type to
     * a time-partition type when present.
     */
    public static DataSource getDatasource(final DataModelYamlTpl d) {
        DataSource datasource = DataSource.builder().sourceId(d.getSourceId()).sqlQuery(d.getSqlQuery())
                .name(d.getName()).tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers()))
                .measures(getMeasures(d.getMeasures())).dimensions(getDimensions(d.getDimensions())).build();
        datasource.setAggTime(getDataSourceAggTime(datasource.getDimensions()));
        if (Objects.nonNull(d.getModelSourceTypeEnum())) {
            datasource.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name()));
        }
        return datasource;
    }
private static String getDataSourceAggTime(List<Dimension> dimensions) {
Optional<Dimension> timeDimension = dimensions.stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).findFirst();
if (timeDimension.isPresent() && Objects.nonNull(timeDimension.get().getDimensionTimeTypeParams())) {
return timeDimension.get().getDimensionTimeTypeParams().getTimeGranularity();
}
return Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE;
}
private static List<Metric> getMetricsByMetricYamlTpl(List<MetricYamlTpl> metricYamlTpls) {
List<Metric> metrics = new ArrayList<>();
for (MetricYamlTpl metricYamlTpl : metricYamlTpls) {
Metric metric = new Metric();
metric.setMetricTypeParams(getMetricTypeParams(metricYamlTpl.getTypeParams()));
metric.setOwners(metricYamlTpl.getOwners());
metric.setType(metricYamlTpl.getType());
metric.setName(metricYamlTpl.getName());
metrics.add(metric);
}
return metrics;
}
    /**
     * Converts a metric type-params template (expression + measures)
     * into the core {@link MetricTypeParams}.
     */
    private static MetricTypeParams getMetricTypeParams(MetricTypeParamsYamlTpl metricTypeParamsYamlTpl) {
        MetricTypeParams metricTypeParams = new MetricTypeParams();
        metricTypeParams.setExpr(metricTypeParamsYamlTpl.getExpr());
        metricTypeParams.setMeasures(getMeasures(metricTypeParamsYamlTpl.getMeasures()));
        return metricTypeParams;
    }
private static List<Measure> getMeasures(List<MeasureYamlTpl> measureYamlTpls) {
List<Measure> measures = new ArrayList<>();
for (MeasureYamlTpl measureYamlTpl : measureYamlTpls) {
Measure measure = new Measure();
measure.setCreateMetric(measureYamlTpl.getCreateMetric());
measure.setExpr(measureYamlTpl.getExpr());
measure.setAgg(measureYamlTpl.getAgg());
measure.setName(measureYamlTpl.getName());
measure.setAlias(measureYamlTpl.getAlias());
measure.setConstraint(measureYamlTpl.getConstraint());
measures.add(measure);
}
return measures;
}
private static List<Dimension> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) {
List<Dimension> dimensions = new ArrayList<>();
for (DimensionYamlTpl dimensionYamlTpl : dimensionYamlTpls) {
Dimension dimension = Dimension.builder().build();
dimension.setType(dimensionYamlTpl.getType());
dimension.setExpr(dimensionYamlTpl.getExpr());
dimension.setName(dimensionYamlTpl.getName());
dimension.setOwners(dimensionYamlTpl.getOwners());
if (Objects.nonNull(dimensionYamlTpl.getDataType())) {
dimension.setDataType(DataType.of(dimensionYamlTpl.getDataType().getType()));
}
if (Objects.isNull(dimension.getDataType())) {
dimension.setDataType(DataType.UNKNOWN);
}
dimension.setDimensionTimeTypeParams(getDimensionTimeTypeParams(dimensionYamlTpl.getTypeParams()));
dimensions.add(dimension);
}
return dimensions;
}
    /**
     * Converts time-type parameters from the YAML template; returns an empty
     * (all-null fields) instance when the template is null rather than null,
     * so callers can set it unconditionally.
     */
    private static DimensionTimeTypeParams getDimensionTimeTypeParams(
            DimensionTimeTypeParamsTpl dimensionTimeTypeParamsTpl) {
        DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams();
        if (dimensionTimeTypeParamsTpl != null) {
            dimensionTimeTypeParams.setTimeGranularity(dimensionTimeTypeParamsTpl.getTimeGranularity());
            dimensionTimeTypeParams.setIsPrimary(dimensionTimeTypeParamsTpl.getIsPrimary());
        }
        return dimensionTimeTypeParams;
    }
private static List<Identify> getIdentify(List<IdentifyYamlTpl> identifyYamlTpls) {
List<Identify> identifies = new ArrayList<>();
for (IdentifyYamlTpl identifyYamlTpl : identifyYamlTpls) {
Identify identify = new Identify();
identify.setType(identifyYamlTpl.getType());
identify.setName(identifyYamlTpl.getName());
identifies.add(identify);
}
return identifies;
}
private static List<JoinRelation> getJoinRelation(List<ModelRela> modelRelas, Map<Long, String> modelIdName) {
List<JoinRelation> joinRelations = new ArrayList<>();
modelRelas.stream().forEach(r -> {
if (modelIdName.containsKey(r.getFromModelId()) && modelIdName.containsKey(r.getToModelId())) {
JoinRelation joinRelation = JoinRelation.builder().left(modelIdName.get(r.getFromModelId()))
.right(modelIdName.get(r.getToModelId())).joinType(r.getJoinType()).build();
List<Triple<String, String, String>> conditions = new ArrayList<>();
r.getJoinConditions().stream().forEach(rr -> {
if (FilterOperatorEnum.isValueCompare(rr.getOperator())) {
conditions.add(Triple.of(rr.getLeftField(), rr.getOperator().getValue(), rr.getRightField()));
}
});
joinRelation.setJoinCondition(conditions);
joinRelations.add(joinRelation);
}
});
return joinRelations;
}
public static void update(HeadlessSchema schema, List<Metric> metric) throws Exception {
if (schema != null) {
updateMetric(metric, schema.getMetrics());
}
}
public static void update(HeadlessSchema schema, DataSource datasourceYamlTpl) throws Exception {
if (schema != null) {
String dataSourceName = datasourceYamlTpl.getName();
Optional<Entry<String, DataSource>> datasourceYamlTplMap = schema.getDatasource().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst();
if (datasourceYamlTplMap.isPresent()) {
datasourceYamlTplMap.get().setValue(datasourceYamlTpl);
} else {
schema.getDatasource().put(dataSourceName, datasourceYamlTpl);
}
}
}
public static void update(HeadlessSchema schema, String datasourceBizName, List<Dimension> dimensionYamlTpls)
throws Exception {
if (schema != null) {
Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema.getDimension().entrySet()
.stream().filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst();
if (datasourceYamlTplMap.isPresent()) {
updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue());
} else {
List<Dimension> dimensions = new ArrayList<>();
updateDimension(dimensionYamlTpls, dimensions);
schema.getDimension().put(datasourceBizName, dimensions);
}
}
}
private static void updateDimension(List<Dimension> dimensionYamlTpls, List<Dimension> dimensions) {
if (CollectionUtils.isEmpty(dimensionYamlTpls)) {
return;
}
Set<String> toAdd = dimensionYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
Iterator<Dimension> iterator = dimensions.iterator();
while (iterator.hasNext()) {
Dimension cur = iterator.next();
if (toAdd.contains(cur.getName())) {
iterator.remove();
}
}
dimensions.addAll(dimensionYamlTpls);
}
private static void updateMetric(List<Metric> metricYamlTpls, List<Metric> metrics) {
if (CollectionUtils.isEmpty(metricYamlTpls)) {
return;
}
Set<String> toAdd = metricYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
Iterator<Metric> iterator = metrics.iterator();
while (iterator.hasNext()) {
Metric cur = iterator.next();
if (toAdd.contains(cur.getName())) {
iterator.remove();
}
}
metrics.addAll(metricYamlTpls);
}
public static String formatKey(String key) {
key = key.trim();
if (key.startsWith("/")) {
key = key.substring(1);
}
if (key.endsWith("/")) {
key = key.substring(0, key.length() - 1);
}
return key;
}
    /**
     * Spring configuration exposing the Guava-backed model cache as a bean.
     * Declared as a non-static inner class on purpose: the cache loader needs
     * the enclosing manager instance ({@code HeadlessSchemaManager.this}) to
     * reload models on a cache miss.
     */
    @Configuration
    @EnableCaching
    public class GuavaCacheConfig {
        // cache entry TTL in minutes (configurable, defaults to 1)
        @Value("${parser.cache.saveMinute:1}")
        private Integer saveMinutes = 1;
        // maximum number of cached models (configurable, defaults to 1000)
        @Value("${parser.cache.maximumSize:1000}")
        private Integer maximumSize = 1000;
        /**
         * Builds the loading cache; entries expire after {@code saveMinutes}
         * and are reloaded via {@code reload(key)} on demand.
         */
        @Bean
        public LoadingCache<String, HeadlessModel> getCache() {
            LoadingCache<String, HeadlessModel> cache
                    = CacheBuilder.newBuilder()
                    .expireAfterWrite(saveMinutes, TimeUnit.MINUTES)
                    .initialCapacity(10)
                    .maximumSize(maximumSize).build(
                            new CacheLoader<String, HeadlessModel>() {
                                @Override
                                public HeadlessModel load(String key) {
                                    // NOTE(review): message says "SemanticSchema" but a HeadlessModel is loaded — confirm/update wording
                                    log.info("load SemanticSchema [{}]", key);
                                    return HeadlessSchemaManager.this.reload(key);
                                }
                            }
                    );
            return cache;
        }
    }
}

View File

@@ -0,0 +1,166 @@
package com.tencent.supersonic.headless.core.parser.calcite.planner;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DataSourceNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.FilterRender;
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.OutputRender;
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.SourceRender;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;
import java.util.Stack;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.rel2sql.RelToSqlConverter;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.sql2rel.SqlToRelConverter;
@Slf4j
public class AggPlanner implements Planner {

    /** the query request (metrics, dimensions, where, limit) being planned */
    private MetricReq metricReq;
    /** semantic schema the query is planned against */
    private HeadlessSchema schema;
    private SqlValidatorScope scope;
    private SqlNode parserNode;
    /** id of the physical source backing the matched datasource */
    private String sourceId;
    private boolean isAgg = false;
    private AggOption aggOption = AggOption.DEFAULT;

    public AggPlanner(HeadlessSchema schema) {
        this.schema = schema;
    }

    /**
     * Builds the SQL parse tree: finds the matching datasource(s), then runs the
     * renderer pipeline (source -> filter -> output), each renderer consuming the
     * previous renderer's output as a named sub-query.
     *
     * @throws Exception when no datasource matches the request
     */
    public void parse() throws Exception {
        // find the match Datasource
        scope = SchemaBuilder.getScope(schema);
        List<DataSource> datasource = getMatchDataSource(scope);
        if (datasource == null || datasource.isEmpty()) {
            throw new Exception("datasource not found");
        }
        isAgg = getAgg(datasource.get(0));
        sourceId = String.valueOf(datasource.get(0).getSourceId());

        // build level by level
        LinkedList<Renderer> builders = new LinkedList<>();
        builders.add(new SourceRender());
        builders.add(new FilterRender());
        builders.add(new OutputRender());
        ListIterator<Renderer> it = builders.listIterator();
        int i = 0;
        Renderer previous = null;
        while (it.hasNext()) {
            Renderer renderer = it.next();
            if (previous != null) {
                previous.render(metricReq, datasource, scope, schema, !isAgg);
                // string concatenation converts i implicitly; String.valueOf was redundant
                renderer.setTable(previous.builderAs(DataSourceNode.getNames(datasource) + "_" + i));
                i++;
            }
            previous = renderer;
        }
        // the last renderer is rendered outside the loop and yields the final node
        builders.getLast().render(metricReq, datasource, scope, schema, !isAgg);
        parserNode = builders.getLast().builder();
    }

    private List<DataSource> getMatchDataSource(SqlValidatorScope scope) throws Exception {
        return DataSourceNode.getMatchDataSources(scope, schema, metricReq);
    }

    /**
     * Decides whether aggregation applies: an explicit AggOption wins;
     * otherwise a datasource with a time granularity implies aggregation
     * for non-native queries.
     */
    private boolean getAgg(DataSource dataSource) {
        if (!AggOption.DEFAULT.equals(aggOption)) {
            return AggOption.isAgg(aggOption);
        }
        // default by dataSource time aggregation
        if (Objects.nonNull(dataSource.getAggTime()) && !dataSource.getAggTime().equalsIgnoreCase(
                Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
            if (!metricReq.isNativeQuery()) {
                return true;
            }
        }
        return isAgg;
    }

    /**
     * Plans the statement: normalizes null collections/limit in the request,
     * builds the parse node, then (optionally) optimizes it.
     */
    @Override
    public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception {
        this.metricReq = queryStatement.getMetricReq();
        if (metricReq.getMetrics() == null) {
            metricReq.setMetrics(new ArrayList<>());
        }
        if (metricReq.getDimensions() == null) {
            metricReq.setDimensions(new ArrayList<>());
        }
        if (metricReq.getLimit() == null) {
            metricReq.setLimit(0L);
        }
        this.aggOption = aggOption;
        // build a parse Node
        parse();
        // optimizer
        optimize();
    }

    /**
     * Rewrites the parse node through the Hep planner (filter-to-group-scan rule)
     * when optimization is enabled on the schema; failures are logged and leave
     * the unoptimized node in place.
     */
    public void optimize() {
        if (Objects.isNull(schema.getRuntimeOptions()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
                || !schema.getRuntimeOptions().getEnableOptimize()) {
            return;
        }
        HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();
        hepProgramBuilder.addRuleInstance(new FilterToGroupScanRule(FilterToGroupScanRule.DEFAULT, schema));
        RelOptPlanner relOptPlanner = new HepPlanner(hepProgramBuilder.build());
        RelToSqlConverter converter = new RelToSqlConverter(SemanticSqlDialect.DEFAULT);
        SqlValidator sqlValidator = Configuration.getSqlValidator(
                scope.getValidator().getCatalogReader().getRootSchema());
        try {
            log.info("before optimize {}", SemanticNode.getSql(parserNode));
            SqlToRelConverter sqlToRelConverter = Configuration.getSqlToRelConverter(scope, sqlValidator,
                    relOptPlanner);
            RelNode sqlRel = sqlToRelConverter.convertQuery(
                    sqlValidator.validate(parserNode), false, true).rel;
            log.debug("RelNode optimize {}", SemanticNode.getSql(converter.visitRoot(sqlRel).asStatement()));
            relOptPlanner.setRoot(sqlRel);
            RelNode relNode = relOptPlanner.findBestExp();
            parserNode = converter.visitRoot(relNode).asStatement();
            log.debug("after optimize {}", SemanticNode.getSql(parserNode));
        } catch (Exception e) {
            // pass the throwable as the last argument (no placeholder) so SLF4J logs the stack trace
            log.error("optimize error", e);
        }
    }

    @Override
    public String getSql() {
        return SemanticNode.getSql(parserNode);
    }

    @Override
    public String getSourceId() {
        return sourceId;
    }

    @Override
    public HeadlessSchema findBest() {
        return schema;
    }
}

View File

@@ -0,0 +1,338 @@
package com.tencent.supersonic.headless.core.parser.calcite.planner;
import com.tencent.supersonic.common.util.calcite.SqlParseUtils;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization.TimePartType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.MaterializationElement;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.TimeRange;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.adapter.enumerable.EnumerableRules;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptMaterialization;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.rel.RelDistributionTraitDef;
import org.apache.calcite.rel.RelHomogeneousShuttle;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelShuttle;
import org.apache.calcite.rel.core.RelFactories;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.rules.materialize.MaterializedViewRules;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.RelBuilder;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.springframework.util.CollectionUtils;
@Slf4j
public class MaterializationPlanner implements Planner {

    protected HeadlessSchema schema;
    /** Calcite schema holding the synthetic materialization tables */
    protected CalciteSchema viewSchema;
    protected HepProgramBuilder hepProgramBuilder;
    protected RelOptPlanner relOptPlanner;
    protected RelBuilder relBuilder;
    protected CalciteCatalogReader calciteCatalogReader;

    /**
     * Orders candidate materializations: more covered fields first; on a tie,
     * the materialization with the higher level wins. Typed comparator
     * (the original used a raw {@code Comparator}).
     */
    protected Comparator<Entry<Long, Set<String>>> materializationSort = new Comparator<Entry<Long, Set<String>>>() {
        @Override
        public int compare(Entry<Long, Set<String>> o1, Entry<Long, Set<String>> o2) {
            if (o1.getValue().size() == o2.getValue().size()) {
                Optional<Materialization> o1Lever = schema.getMaterializationList().stream()
                        .filter(m -> m.getMaterializationId().equals(o1.getKey())).findFirst();
                Optional<Materialization> o2Lever = schema.getMaterializationList().stream()
                        .filter(m -> m.getMaterializationId().equals(o2.getKey())).findFirst();
                if (o1Lever.isPresent() && o2Lever.isPresent()) {
                    return o2Lever.get().getLevel() - o1Lever.get().getLevel();
                }
                return 0;
            }
            return o2.getValue().size() - o1.getValue().size();
        }
    };

    public MaterializationPlanner(HeadlessSchema schema) {
        this.schema = schema;
        init();
    }

    /**
     * Finds materializations covering the query's fields within the requested
     * time ranges; when one (or a pair sharing the same database) covers all
     * fields, it is registered on the schema as an extra datasource.
     */
    @Override
    public void explain(QueryStatement queryStatement, AggOption isAgg) throws Exception {
        // findMatchMaterialization
        // checkValid field + time
        if (CollectionUtils.isEmpty(queryStatement.getTimeRanges())) {
            //has no matchMaterialization time info
            return;
        }
        Set<String> fields = new HashSet<>();
        MetricReq metricCommand = queryStatement.getMetricReq();
        if (!Objects.isNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
            fields.addAll(SqlParseUtils.getFilterField(metricCommand.getWhere()));
        }
        if (!CollectionUtils.isEmpty(metricCommand.getMetrics())) {
            fields.addAll(metricCommand.getMetrics());
        }
        if (!CollectionUtils.isEmpty(metricCommand.getDimensions())) {
            fields.addAll(metricCommand.getDimensions());
        }
        // materialization id -> fields it can serve within the query time ranges
        Map<Long, Set<String>> matchMaterialization = new HashMap<>();
        Map<Long, Long> materializationDataBase = schema.getMaterializationList().stream()
                .collect(Collectors.toMap(Materialization::getMaterializationId, Materialization::getDataBase));
        for (String elem : fields) {
            boolean checkOk = false;
            for (Materialization materialization : schema.getMaterializationList()) {
                if (check(metricCommand, materialization, elem, queryStatement.getTimeRanges())) {
                    if (!matchMaterialization.containsKey(materialization.getMaterializationId())) {
                        matchMaterialization.put(materialization.getMaterializationId(), new HashSet<>());
                    }
                    matchMaterialization.get(materialization.getMaterializationId()).add(elem);
                    checkOk = true;
                }
            }
            if (!checkOk) {
                log.info("check fail [{}]", elem);
            }
        }
        if (!CollectionUtils.isEmpty(matchMaterialization)) {
            List<Entry<Long, Set<String>>> sortedMaterialization = new ArrayList<>(matchMaterialization.entrySet());
            // BUGFIX: the original sorted a throwaway copy
            // (stream().collect(toList()).sort(...)), leaving this list unsorted
            sortedMaterialization.sort(materializationSort);
            for (Entry<Long, Set<String>> m : sortedMaterialization) {
                Optional<Materialization> materialization = schema.getMaterializationList().stream()
                        .filter(mz -> mz.getMaterializationId().equals(m.getKey())).findFirst();
                if (!materialization.isPresent()) {
                    continue;
                }
                // fields the materialization view exposes: matched fields + entity + date columns
                Set<String> viewField = new HashSet<>(m.getValue());
                viewField.add(materialization.get().getEntities());
                viewField.add(materialization.get().getDateInfo());
                if (materialization.get().getTimePartType().equals(TimePartType.ZIPPER)) {
                    viewField.add(Constants.MATERIALIZATION_ZIPPER_START + materialization.get().getDateInfo());
                    viewField.add(Constants.MATERIALIZATION_ZIPPER_END + materialization.get().getDateInfo());
                }
                if (viewField.containsAll(fields)) {
                    addDataSource(materialization.get());
                    break;
                }
                // single view does not cover everything: try pairing with another
                // materialization from the same database
                List<Entry<Long, Set<String>>> linkMaterialization = new ArrayList<>();
                for (Entry<Long, Set<String>> mm : sortedMaterialization) {
                    if (mm.getKey().equals(m.getKey())) {
                        continue;
                    }
                    if (materializationDataBase.get(mm.getKey()).equals(materializationDataBase.get(m.getKey()))) {
                        linkMaterialization.add(mm);
                    }
                }
                if (!CollectionUtils.isEmpty(linkMaterialization)) {
                    linkMaterialization.sort(materializationSort);
                    for (Entry<Long, Set<String>> mm : linkMaterialization) {
                        Set<String> linkField = new HashSet<>(mm.getValue());
                        linkField.addAll(viewField);
                        if (linkField.containsAll(fields)) {
                            Optional<Materialization> linkMaterial = schema.getMaterializationList().stream()
                                    .filter(mz -> mz.getMaterializationId().equals(mm.getKey())).findFirst();
                            if (linkMaterial.isPresent()) {
                                addDataSource(materialization.get());
                                addDataSource(linkMaterial.get());
                                break;
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Registers a materialization on the schema as a synthetic datasource
     * named {@code v_<materializationId>}, exposing its metrics/dimensions
     * plus the appropriate date column(s).
     */
    private void addDataSource(Materialization materialization) {
        Identify identify = new Identify();
        identify.setName(materialization.getEntities());
        List<Measure> metrics = materialization.getMetrics().stream()
                .map(m -> Measure.builder().name(m.getName()).expr(m.getName()).build()).collect(
                        Collectors.toList());
        List<Dimension> dimensions = materialization.getDimensions().stream()
                .map(d -> Dimension.builder().name(d.getName()).expr(d.getName()).build()).collect(
                        Collectors.toList());
        if (materialization.getTimePartType().equals(TimePartType.ZIPPER)) {
            // zipper tables carry a [start_, end_] date range instead of a single date column
            dimensions.add(
                    Dimension.builder().name(Constants.MATERIALIZATION_ZIPPER_START + materialization.getDateInfo())
                            .type(Constants.DIMENSION_TYPE_TIME)
                            .expr(Constants.MATERIALIZATION_ZIPPER_START + materialization.getDateInfo()).build());
            dimensions.add(
                    Dimension.builder().name(Constants.MATERIALIZATION_ZIPPER_END + materialization.getDateInfo())
                            .type(Constants.DIMENSION_TYPE_TIME)
                            .expr(Constants.MATERIALIZATION_ZIPPER_END + materialization.getDateInfo()).build());
        } else {
            dimensions.add(Dimension.builder().name(materialization.getDateInfo()).expr(materialization.getDateInfo())
                    .type(Constants.DIMENSION_TYPE_TIME)
                    .build());
        }
        DataSource dataSource = DataSource.builder().sourceId(materialization.getDataBase())
                .tableQuery(materialization.getDestinationTable())
                .timePartType(materialization.getTimePartType())
                .name("v_" + String.valueOf(materialization.getMaterializationId()))
                .identifiers(Arrays.asList(identify))
                .measures(metrics)
                .dimensions(dimensions)
                .build();
        schema.getDatasource().put(dataSource.getName(), dataSource);
    }

    @Override
    public String getSql() {
        return null;
    }

    @Override
    public String getSourceId() {
        return null;
    }

    @Override
    public HeadlessSchema findBest() {
        return schema;
    }

    /** Sets up the Hep planner with the materialized-view rewrite rule. */
    private void init() {
        viewSchema = SchemaBuilder.getMaterializationSchema();
        hepProgramBuilder = new HepProgramBuilder();
        hepProgramBuilder.addRuleInstance(MaterializedViewRules.PROJECT_FILTER);
        relOptPlanner = new HepPlanner(hepProgramBuilder.build());
        calciteCatalogReader = new CalciteCatalogReader(
                CalciteSchema.from(viewSchema.plus()),
                CalciteSchema.from(viewSchema.plus()).path(null),
                Configuration.typeFactory,
                new CalciteConnectionConfigImpl(new Properties()));
        relOptPlanner.addRelTraitDef(ConventionTraitDef.INSTANCE);
        relOptPlanner.addRelTraitDef(RelDistributionTraitDef.INSTANCE);
        EnumerableRules.rules().forEach(relOptPlanner::addRule);
        RexBuilder rexBuilder = new RexBuilder(Configuration.typeFactory);
        RelOptCluster relOptCluster = RelOptCluster.create(relOptPlanner, rexBuilder);
        relBuilder = RelFactories.LOGICAL_BUILDER.create(relOptCluster, calciteCatalogReader);
    }

    /** ORs together a range predicate per requested time range on {@code viewField}. */
    private RexNode getRexNode(List<ImmutablePair<String, String>> timeRanges, String viewField) {
        RexNode rexNode = null;
        for (ImmutablePair<String, String> timeRange : timeRanges) {
            if (rexNode == null) {
                rexNode = getRexNodeByTimeRange(TimeRange.builder().start(timeRange.left).end(timeRange.right).build(),
                        viewField);
                continue;
            }
            rexNode = relBuilder.call(SqlStdOperatorTable.OR, rexNode,
                    getRexNodeByTimeRange(TimeRange.builder().start(timeRange.left).end(timeRange.right).build(),
                            viewField));
        }
        return rexNode;
    }

    /**
     * ORs together the time ranges over which the materialization holds the
     * given element (looked up among dimensions first, then metrics); null
     * when the element is unknown to the materialization.
     */
    private RexNode getRexNode(Materialization materialization, String elem, String viewField) {
        Optional<MaterializationElement> dim = materialization.getDimensions()
                .stream().filter(d -> d.getName().equalsIgnoreCase(elem)).findFirst();
        if (!dim.isPresent()) {
            dim = materialization.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(elem)).findFirst();
        }
        RexNode rexNode = null;
        if (dim.isPresent()) {
            for (TimeRange timeRange : dim.get().getTimeRangeList()) {
                if (rexNode == null) {
                    rexNode = getRexNodeByTimeRange(timeRange, viewField);
                    continue;
                }
                rexNode = relBuilder.call(SqlStdOperatorTable.OR, rexNode, getRexNodeByTimeRange(timeRange, viewField));
            }
        }
        return rexNode;
    }

    /** Builds {@code field >= start AND field <= end} for one time range. */
    private RexNode getRexNodeByTimeRange(TimeRange timeRange, String field) {
        return relBuilder.call(SqlStdOperatorTable.AND,
                relBuilder.call(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, relBuilder.field(field),
                        relBuilder.literal(timeRange.getStart())),
                relBuilder.call(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, relBuilder.field(field),
                        relBuilder.literal(timeRange.getEnd())));
    }

    /**
     * Checks whether the materialization can serve {@code elem} for all the
     * requested time ranges, by registering the materialization's availability
     * window as a Calcite materialized view and testing whether the planner
     * rewrites a probe query to use it (i.e. the base table disappears from
     * the best plan).
     * NOTE(review): {@code metricCommand} is currently unused — kept for
     * signature compatibility; confirm before removing.
     */
    public boolean check(MetricReq metricCommand, Materialization materialization, String elem,
            List<ImmutablePair<String, String>> timeRanges)
            throws SqlParseException {
        boolean isMatch = false;
        try {
            relBuilder.clear();
            if (!CollectionUtils.isEmpty(relOptPlanner.getMaterializations())) {
                relOptPlanner.clear();
            }
            String db = SchemaBuilder.MATERIALIZATION_SYS_DB;
            RelBuilder viewBuilder = relBuilder.scan(Arrays.asList(db, SchemaBuilder.MATERIALIZATION_SYS_SOURCE));
            RexNode viewFilter = getRexNode(materialization, elem, SchemaBuilder.MATERIALIZATION_SYS_FIELD_DATE);
            if (viewFilter == null) {
                return false;
            }
            RelNode viewRel = viewBuilder.filter(viewFilter).project(relBuilder.fields()).build();
            log.debug("view {}", viewRel.explain());
            List<String> view = Arrays.asList(db, SchemaBuilder.MATERIALIZATION_SYS_VIEW);
            RelNode replacement = relBuilder.scan(view).build();
            RelOptMaterialization relOptMaterialization = new RelOptMaterialization(replacement, viewRel, null, view);
            relOptPlanner.addMaterialization(relOptMaterialization);
            RelNode checkRel = relBuilder.scan(Arrays.asList(db, SchemaBuilder.MATERIALIZATION_SYS_SOURCE))
                    .filter(getRexNode(timeRanges, SchemaBuilder.MATERIALIZATION_SYS_FIELD_DATE))
                    .project(relBuilder.field(SchemaBuilder.MATERIALIZATION_SYS_FIELD_DATE)).build();
            relOptPlanner.setRoot(checkRel);
            RelNode optRel = relOptPlanner.findBestExp();
            log.debug("findBestExp {}", optRel.explain());
            isMatch = !extractTableNames(optRel).contains(SchemaBuilder.MATERIALIZATION_SYS_SOURCE);
        } catch (Exception e) {
            // pass the throwable as the last argument (no placeholder) so SLF4J logs the stack trace
            log.error("check error", e);
        }
        return isMatch;
    }

    /** Collects the qualified names of all tables scanned by the plan. */
    public static Set<String> extractTableNames(RelNode relNode) {
        Set<String> tableNames = new HashSet<>();
        RelShuttle shuttle = new RelHomogeneousShuttle() {
            @Override
            public RelNode visit(TableScan scan) {
                RelOptTable table = scan.getTable();
                tableNames.addAll(table.getQualifiedName());
                return scan;
            }
        };
        relNode.accept(shuttle);
        return tableNames;
    }
}

View File

@@ -0,0 +1,17 @@
package com.tencent.supersonic.headless.core.parser.calcite.planner;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
/**
 * Plans a {@link QueryStatement} into executable SQL against a headless schema.
 * Interface members are implicitly public; the redundant modifiers were removed.
 */
public interface Planner {

    /** Builds (and, depending on the implementation, optimizes) the plan for the statement. */
    void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception;

    /** Returns the generated SQL text (may be null for planners that only rewrite the schema). */
    String getSql();

    /** Returns the id of the physical source the SQL should run against. */
    String getSourceId();

    /** Returns the schema chosen as the best match for the query. */
    HeadlessSchema findBest();
}

View File

@@ -0,0 +1,21 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
/**
 * Shared constants for the calcite-based S2SQL parser: table-alias prefixes
 * used when rendering sub-queries, time-dimension markers, and the column
 * prefixes of zipper materializations.
 */
public class Constants {

    public static final String DIMENSION_IDENTIFY = "__";
    public static final String DATASOURCE_TABLE_PREFIX = "src0_";
    public static final String DATASOURCE_TABLE_FILTER_PREFIX = "src2_";
    public static final String DATASOURCE_TABLE_OUT_PREFIX = "src00_";
    public static final String JOIN_TABLE_PREFIX = "src1_";
    public static final String JOIN_TABLE_OUT_PREFIX = "src11_";
    public static final String JOIN_TABLE_LEFT_PREFIX = "src12_";
    public static final String DIMENSION_TYPE_TIME_GRANULARITY_NONE = "none";
    public static final String DIMENSION_TYPE_TIME = "time";
    public static final String DIMENSION_ARRAY_SINGLE_SUFFIX = "_sgl";
    // column-name prefixes of the [start, end] range columns in zipper materializations
    public static final String MATERIALIZATION_ZIPPER_START = "start_";
    public static final String MATERIALIZATION_ZIPPER_END = "end_";
    public static final String SQL_PARSER_TABLE = "parsed_tb";
    public static final String SQL_PARSER_DB = "parsed_db";
    public static final String SQL_PARSER_FIELD = "parsed_field";

    // utility class: not instantiable
    private Constants() {
    }
}

View File

@@ -0,0 +1,29 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import java.util.List;
import lombok.Builder;
import lombok.Data;
@Data
@Builder
public class DataSource {
private String name;
private Long sourceId;
private String sqlQuery;
private String tableQuery;
private List<Identify> identifiers;
private List<Dimension> dimensions;
private List<Measure> measures;
private String aggTime;
private Materialization.TimePartType timePartType = Materialization.TimePartType.None;
}

View File

@@ -0,0 +1,55 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import java.util.Arrays;
/**
 * Column data types known to the parser; {@link #of(String)} resolves a
 * case-insensitive name and falls back to {@link #UNKNOWN}.
 */
public enum DataType {
    ARRAY("ARRAY"),
    MAP("MAP"),
    JSON("JSON"),
    VARCHAR("VARCHAR"),
    DATE("DATE"),
    BIGINT("BIGINT"),
    INT("INT"),
    DOUBLE("DOUBLE"),
    FLOAT("FLOAT"),
    DECIMAL("DECIMAL"),
    UNKNOWN("unknown");

    private final String type;

    DataType(String type) {
        this.type = type;
    }

    public String getType() {
        return type;
    }

    /** Case-insensitive lookup; unrecognized (or null) input yields UNKNOWN. */
    public static DataType of(String type) {
        return Arrays.stream(values())
                .filter(candidate -> candidate.getType().equalsIgnoreCase(type))
                .findFirst()
                .orElse(UNKNOWN);
    }

    /** True for the structured (non-scalar) types. */
    public boolean isObject() {
        return this == ARRAY || this == MAP || this == JSON;
    }

    public boolean isArray() {
        return this == ARRAY;
    }
}

View File

@@ -0,0 +1,25 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticItem;
import lombok.Builder;
import lombok.Data;
@Data
@Builder
public class Dimension implements SemanticItem {
String name;
private String owners;
private String type;
private String expr;
private DimensionTimeTypeParams dimensionTimeTypeParams;
private DataType dataType = DataType.UNKNOWN;
@Override
public String getName() {
return name;
}
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import lombok.Data;
@Data
public class DimensionTimeTypeParams {

    // whether this is the primary time dimension (stored as a string — TODO confirm expected values)
    private String isPrimary;
    // time granularity consumed as the datasource aggTime; "none" disables time aggregation
    private String timeGranularity;
}

View File

@@ -0,0 +1,18 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.Data;
/**
 * In-memory semantic model for one root path: metrics, datasources (by name),
 * dimensions (by datasource name), materializations and join relations.
 */
@Data
public class HeadlessModel {

    // cache key / model root this model was loaded for
    private String rootPath;
    private List<Metric> metrics = new ArrayList<>();
    // datasource name -> datasource
    private Map<String, DataSource> datasourceMap = new HashMap<>();
    // datasource name -> its dimensions
    private Map<String, List<Dimension>> dimensionMap = new HashMap<>();
    private List<Materialization> materializationList = new ArrayList<>();
    // no initializer: may be null when the model has no join relations — callers should check
    private List<JoinRelation> joinRelations;
}

View File

@@ -0,0 +1,21 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * An identifier column of a datasource (primary or foreign key).
 * NOTE(review): the {@link Type} enum is declared but the {@code type} field
 * is a free-form String ("primary"/"foreign") — consider unifying.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Identify {

    public enum Type {
        PRIMARY, FOREIGN
    }

    private String name;
    // primary or foreign
    private String type;
}

View File

@@ -0,0 +1,17 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import java.util.List;
import lombok.Builder;
import lombok.Data;
import org.apache.commons.lang3.tuple.Triple;
/**
 * A join between two datasources of the model; each condition triple is
 * (leftField, operator, rightField).
 */
@Data
@Builder
public class JoinRelation {

    // name of the left-hand datasource
    private String left;
    // name of the right-hand datasource
    private String right;
    private String joinType;
    private List<Triple<String, String, String>> joinCondition;
}

View File

@@ -0,0 +1,51 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import java.util.ArrayList;
import java.util.List;
import lombok.Builder;
import lombok.Data;
@Data
@Builder
public class Materialization {
public enum TimePartType {
/**
* partition time type
* 1 - FULL, not use partition
* 2 - PARTITION , use time list
* 3 - ZIPPER, use [startDate, endDate] range time
*/
FULL("FULL"),
PARTITION("PARTITION"),
ZIPPER("ZIPPER"),
None("");
private String name;
TimePartType(String name) {
this.name = name;
}
public static TimePartType of(String name) {
for (TimePartType typeEnum : TimePartType.values()) {
if (typeEnum.name.equalsIgnoreCase(name)) {
return typeEnum;
}
}
return TimePartType.None;
}
}
private TimePartType timePartType;
private String destinationTable;
private String dateInfo;
private String entities;
private Long modelId;
private Long dataBase;
private Long materializationId;
private Integer level;
private List<MaterializationElement> dimensions = new ArrayList<>();
private List<MaterializationElement> metrics = new ArrayList<>();
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import java.util.List;
import lombok.Builder;
import lombok.Data;
/**
 * A single dimension or metric column of a materialization, together with the
 * time ranges for which it has been materialized.
 */
@Data
@Builder
public class MaterializationElement {

    private List<TimeRange> timeRangeList;
    private String name;
}

View File

@@ -0,0 +1,27 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * A measure of a data source: a raw expression plus an aggregation function,
 * from which metrics are derived.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class Measure {

    private String name;

    // aggregation function: sum, max, min, avg, count, count_distinct
    private String agg;

    // SQL expression the aggregation is applied to
    private String expr;

    // optional filter condition applied to this measure
    private String constraint;

    private String alias;

    // flag-like string — presumably marks whether a metric is auto-created from
    // this measure; TODO confirm semantics against callers
    private String createMetric;
}

View File

@@ -0,0 +1,22 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticItem;
import java.util.List;
import lombok.Data;
/**
 * A metric of the semantic model, defined over one or more measures.
 */
@Data
public class Metric implements SemanticItem {

    private String name;
    private List<String> owners;
    private String type;
    private MetricTypeParams metricTypeParams;

    // SemanticItem contract: expose the metric name as the item name
    @Override
    public String getName() {
        return name;
    }
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import java.util.List;
import lombok.Data;
/**
 * Definition of how a metric is computed: the measures it is built from and
 * the expression combining them.
 */
@Data
public class MetricTypeParams {

    private List<Measure> measures;
    // expression over the measure names
    private String expr;
}

View File

@@ -0,0 +1,11 @@
package com.tencent.supersonic.headless.core.parser.calcite.s2sql;
import lombok.Builder;
import lombok.Data;
/**
 * A [start, end] time range, both endpoints as strings.
 */
@Data
@Builder
public class TimeRange {

    private String start;
    private String end;
}

View File

@@ -0,0 +1,131 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.DataContext;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.hint.RelHint;
import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
import org.apache.calcite.rel.type.RelRecordType;
import org.apache.calcite.rel.type.StructKind;
import org.apache.calcite.schema.ScannableTable;
import org.apache.calcite.schema.Statistic;
import org.apache.calcite.schema.Statistics;
import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.sql.type.SqlTypeName;
/**
 * A calcite {@link AbstractTable} describing a data source by field names and
 * types only. It is used for validation/planning: {@link #scan} is
 * intentionally unsupported because rows are never produced from this table.
 */
public class DataSourceTable extends AbstractTable implements ScannableTable, TranslatableTable {

    private final String tableName;
    private final List<String> fieldNames;
    private final List<SqlTypeName> fieldTypes;
    private final Statistic statistic;

    // lazily built from fieldNames/fieldTypes on first getRowType call
    private RelDataType rowType;

    private DataSourceTable(String tableName, List<String> fieldNames, List<SqlTypeName> fieldTypes,
            Statistic statistic) {
        this.tableName = tableName;
        this.fieldNames = fieldNames;
        this.fieldTypes = fieldTypes;
        this.statistic = statistic;
    }

    public static Builder newBuilder(String tableName) {
        return new Builder(tableName);
    }

    public String getTableName() {
        return tableName;
    }

    @Override
    public RelDataType getRowType(RelDataTypeFactory typeFactory) {
        if (rowType == null) {
            List<RelDataTypeField> fields = new ArrayList<>(fieldNames.size());
            for (int i = 0; i < fieldNames.size(); i++) {
                RelDataType fieldType = typeFactory.createSqlType(fieldTypes.get(i));
                RelDataTypeField field = new RelDataTypeFieldImpl(fieldNames.get(i), i, fieldType);
                fields.add(field);
            }
            // PEEK_FIELDS lets unqualified column references resolve into this record
            rowType = new RelRecordType(StructKind.PEEK_FIELDS, fields, true);
        }
        return rowType;
    }

    @Override
    public Statistic getStatistic() {
        return statistic;
    }

    @Override
    public Enumerable<Object[]> scan(DataContext root) {
        // this table only describes schema; it never yields rows
        throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public RelNode toRel(RelOptTable.ToRelContext toRelContext, RelOptTable relOptTable) {
        List<RelHint> hint = new ArrayList<>();
        return new LogicalTableScan(toRelContext.getCluster(), toRelContext.getCluster().traitSet(), hint, relOptTable);
    }

    /** Fluent builder; validates table/field names and requires a positive row count. */
    public static final class Builder {

        private final String tableName;
        private final List<String> fieldNames = new ArrayList<>();
        private final List<SqlTypeName> fieldTypes = new ArrayList<>();
        private long rowCount;

        private Builder(String tableName) {
            if (tableName == null || tableName.isEmpty()) {
                throw new IllegalArgumentException("Table name cannot be null or empty");
            }
            this.tableName = tableName;
        }

        public Builder addField(String name, SqlTypeName typeName) {
            if (name == null || name.isEmpty()) {
                throw new IllegalArgumentException("Field name cannot be null or empty");
            }
            if (fieldNames.contains(name)) {
                throw new IllegalArgumentException("Field already defined: " + name);
            }
            fieldNames.add(name);
            fieldTypes.add(typeName);
            return this;
        }

        public Builder withRowCount(long rowCount) {
            this.rowCount = rowCount;
            return this;
        }

        public DataSourceTable build() {
            if (fieldNames.isEmpty()) {
                throw new IllegalStateException("Table must have at least one field");
            }
            // <= rejects negative counts too, matching the message (was == 0, which let them through)
            if (rowCount <= 0L) {
                throw new IllegalStateException("Table must have positive row count");
            }
            return new DataSourceTable(tableName, fieldNames, fieldTypes, Statistics.of(rowCount, null));
        }
    }
}

View File

@@ -0,0 +1,138 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.impl.AbstractSchema;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Calcite schema wrapping a {@link HeadlessModel}. Most accessors delegate to
 * the model; tables registered via the builder are exposed through
 * {@link #getTableMap()}.
 */
public class HeadlessSchema extends AbstractSchema {

    private final String rootPath;
    private final Map<String, Table> tableMap;
    // backing semantic model; replaceable via setSemanticModel
    private HeadlessModel headlessModel = new HeadlessModel();
    // kept on the schema itself, not on the model — note the model also has joinRelations
    private List<JoinRelation> joinRelations;
    private RuntimeOptions runtimeOptions;

    private HeadlessSchema(String rootPath, Map<String, Table> tableMap) {
        this.rootPath = rootPath;
        this.tableMap = tableMap;
    }

    public static Builder newBuilder(String rootPath) {
        return new Builder(rootPath);
    }

    public String getRootPath() {
        return rootPath;
    }

    public void setSemanticModel(HeadlessModel headlessModel) {
        this.headlessModel = headlessModel;
    }

    public HeadlessModel getSemanticModel() {
        return headlessModel;
    }

    @Override
    public Map<String, Table> getTableMap() {
        return tableMap;
    }

    // schema content is treated as immutable per version, so any snapshot is this instance
    @Override
    public Schema snapshot(SchemaVersion version) {
        return this;
    }

    public Map<String, DataSource> getDatasource() {
        return headlessModel.getDatasourceMap();
    }

    public void setDatasource(Map<String, DataSource> datasource) {
        headlessModel.setDatasourceMap(datasource);
    }

    public Map<String, List<Dimension>> getDimension() {
        return headlessModel.getDimensionMap();
    }

    public void setDimension(Map<String, List<Dimension>> dimensions) {
        headlessModel.setDimensionMap(dimensions);
    }

    public List<Metric> getMetrics() {
        return headlessModel.getMetrics();
    }

    public void setMetric(List<Metric> metric) {
        headlessModel.setMetrics(metric);
    }

    public void setMaterializationList(List<Materialization> materializationList) {
        headlessModel.setMaterializationList(materializationList);
    }

    public List<Materialization> getMaterializationList() {
        return headlessModel.getMaterializationList();
    }

    public void setJoinRelations(List<JoinRelation> joinRelations) {
        this.joinRelations = joinRelations;
    }

    public List<JoinRelation> getJoinRelations() {
        return joinRelations;
    }

    public void setRuntimeOptions(RuntimeOptions runtimeOptions) {
        this.runtimeOptions = runtimeOptions;
    }

    public RuntimeOptions getRuntimeOptions() {
        return runtimeOptions;
    }

    /** Builder collecting named tables before the schema is created. */
    public static final class Builder {

        private final String rootPath;
        private final Map<String, Table> tableMap = new HashMap<>();

        private Builder(String rootPath) {
            if (rootPath == null || rootPath.isEmpty()) {
                throw new IllegalArgumentException("Schema name cannot be null or empty");
            }
            this.rootPath = rootPath;
        }

        public Builder addTable(DataSourceTable table) {
            if (tableMap.containsKey(table.getTableName())) {
                throw new IllegalArgumentException("Table already defined: " + table.getTableName());
            }
            tableMap.put(table.getTableName(), table);
            return this;
        }

        public HeadlessSchema build() {
            return new HeadlessSchema(rootPath, tableMap);
        }
    }
}

View File

@@ -0,0 +1,158 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import org.apache.calcite.sql.fun.SqlLibrary;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlConformanceEnum;
/**
 * SQL conformance that delegates almost everything to
 * {@link SqlConformanceEnum#BIG_QUERY}, with a handful of deliberate
 * deviations (marked below) for the headless dialect.
 */
public class HeadlessSqlConformance implements SqlConformance {

    @Override
    public boolean isLiberal() {
        return SqlConformanceEnum.BIG_QUERY.isLiberal();
    }

    @Override
    public boolean allowCharLiteralAlias() {
        return SqlConformanceEnum.BIG_QUERY.allowCharLiteralAlias();
    }

    @Override
    public boolean isGroupByAlias() {
        return SqlConformanceEnum.BIG_QUERY.isGroupByAlias();
    }

    @Override
    public boolean isGroupByOrdinal() {
        return SqlConformanceEnum.BIG_QUERY.isGroupByOrdinal();
    }

    // deviation: BIG_QUERY allows aliases in HAVING; this dialect does not
    @Override
    public boolean isHavingAlias() {
        return false;
    }

    @Override
    public boolean isSortByOrdinal() {
        return SqlConformanceEnum.BIG_QUERY.isSortByOrdinal();
    }

    @Override
    public boolean isSortByAlias() {
        return SqlConformanceEnum.BIG_QUERY.isSortByAlias();
    }

    @Override
    public boolean isSortByAliasObscures() {
        return SqlConformanceEnum.BIG_QUERY.isSortByAliasObscures();
    }

    @Override
    public boolean isFromRequired() {
        return SqlConformanceEnum.BIG_QUERY.isFromRequired();
    }

    @Override
    public boolean splitQuotedTableName() {
        return SqlConformanceEnum.BIG_QUERY.splitQuotedTableName();
    }

    @Override
    public boolean allowHyphenInUnquotedTableName() {
        return SqlConformanceEnum.BIG_QUERY.allowHyphenInUnquotedTableName();
    }

    @Override
    public boolean isBangEqualAllowed() {
        return SqlConformanceEnum.BIG_QUERY.isBangEqualAllowed();
    }

    @Override
    public boolean isPercentRemainderAllowed() {
        return SqlConformanceEnum.BIG_QUERY.isPercentRemainderAllowed();
    }

    @Override
    public boolean isMinusAllowed() {
        return SqlConformanceEnum.BIG_QUERY.isMinusAllowed();
    }

    @Override
    public boolean isApplyAllowed() {
        return SqlConformanceEnum.BIG_QUERY.isApplyAllowed();
    }

    @Override
    public boolean isInsertSubsetColumnsAllowed() {
        return SqlConformanceEnum.BIG_QUERY.isInsertSubsetColumnsAllowed();
    }

    @Override
    public boolean allowAliasUnnestItems() {
        return SqlConformanceEnum.BIG_QUERY.allowAliasUnnestItems();
    }

    @Override
    public boolean allowNiladicParentheses() {
        return SqlConformanceEnum.BIG_QUERY.allowNiladicParentheses();
    }

    @Override
    public boolean allowExplicitRowValueConstructor() {
        return SqlConformanceEnum.BIG_QUERY.allowExplicitRowValueConstructor();
    }

    @Override
    public boolean allowExtend() {
        return SqlConformanceEnum.BIG_QUERY.allowExtend();
    }

    // deviation: accept MySQL-style "LIMIT start, count"
    @Override
    public boolean isLimitStartCountAllowed() {
        return true;
    }

    // deviation: reject "LIMIT ... OFFSET ..." form
    @Override
    public boolean isOffsetLimitAllowed() {
        return false;
    }

    @Override
    public boolean allowGeometry() {
        return SqlConformanceEnum.BIG_QUERY.allowGeometry();
    }

    @Override
    public boolean shouldConvertRaggedUnionTypesToVarying() {
        return SqlConformanceEnum.BIG_QUERY.shouldConvertRaggedUnionTypesToVarying();
    }

    @Override
    public boolean allowExtendedTrim() {
        return SqlConformanceEnum.BIG_QUERY.allowExtendedTrim();
    }

    @Override
    public boolean allowPluralTimeUnits() {
        return SqlConformanceEnum.BIG_QUERY.allowPluralTimeUnits();
    }

    @Override
    public boolean allowQualifyingCommonColumn() {
        return SqlConformanceEnum.BIG_QUERY.allowQualifyingCommonColumn();
    }

    // deviation: disallow VALUE as synonym for VALUES
    @Override
    public boolean isValueAllowed() {
        return false;
    }

    @Override
    public SqlLibrary semantics() {
        return SqlConformanceEnum.BIG_QUERY.semantics();
    }

    // deviation: no lenient type coercion
    @Override
    public boolean allowLenientCoercion() {
        return false;
    }
}

View File

@@ -0,0 +1,17 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
/**
 * Type factory whose default character set is UTF-8 instead of the calcite
 * default (ISO-8859-1).
 */
public class HeadlessSqlTypeFactoryImpl extends SqlTypeFactoryImpl {

    public HeadlessSqlTypeFactoryImpl(RelDataTypeSystem typeSystem) {
        super(typeSystem);
    }

    @Override
    public Charset getDefaultCharset() {
        // StandardCharsets.UTF_8 avoids the Charset.forName lookup and any
        // risk of a misspelled charset name throwing at runtime
        return StandardCharsets.UTF_8;
    }
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import lombok.Builder;
import lombok.Data;
import org.apache.commons.lang3.tuple.Triple;
/**
 * Per-query options that tune parsing/optimization at runtime.
 */
@Data
@Builder
public class RuntimeOptions {

    // presumably (min time, max time, date field) — TODO confirm element order against callers
    private Triple<String, String, String> minMaxTime;
    private Boolean enableOptimize;
}

View File

@@ -0,0 +1,91 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.sql.S2SQLSqlValidatorImpl;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.prepare.Prepare;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.AbstractSchema;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.validate.ParameterScope;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
 * Static helpers that build calcite schemas/scopes for validation and for the
 * materialization system tables.
 */
public class SchemaBuilder {

    public static final String MATERIALIZATION_SYS_DB = "SYS";
    public static final String MATERIALIZATION_SYS_SOURCE = "SYS_SOURCE";
    public static final String MATERIALIZATION_SYS_VIEW = "SYS_VIEW";
    public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1";
    public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2";

    /**
     * Registers the given schema under a fresh root schema and returns a
     * validator scope for expression validation against it.
     */
    public static SqlValidatorScope getScope(HeadlessSchema schema) throws Exception {
        Map<String, RelDataType> nameToTypeMap = new HashMap<>();
        CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
        rootSchema.add(schema.getRootPath(), schema);
        Prepare.CatalogReader catalogReader = new CalciteCatalogReader(
                rootSchema,
                Collections.singletonList(schema.getRootPath()),
                Configuration.typeFactory,
                Configuration.config
        );
        S2SQLSqlValidatorImpl s2SQLSqlValidator = new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
                Configuration.typeFactory, Configuration.validatorConfig);
        return new ParameterScope(s2SQLSqlValidator, nameToTypeMap);
    }

    /** Builds the fixed SYS schema holding the materialization source/view tables. */
    public static CalciteSchema getMaterializationSchema() {
        CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
        SchemaPlus schema = rootSchema.plus().add(MATERIALIZATION_SYS_DB, new AbstractSchema());
        DataSourceTable srcTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_SOURCE)
                .addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
                .addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT)
                .withRowCount(1)
                .build();
        schema.add(MATERIALIZATION_SYS_SOURCE, srcTable);
        DataSourceTable viewTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_VIEW)
                .addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
                .addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT)
                .withRowCount(1)
                .build();
        schema.add(MATERIALIZATION_SYS_VIEW, viewTable);
        return rootSchema;
    }

    /**
     * Registers table {@code tbSrc} (optionally under sub-schema {@code dbSrc})
     * with VARCHAR date/dimension fields and BIGINT metric fields.
     */
    public static void addSourceView(CalciteSchema viewSchema, String dbSrc, String tbSrc, Set<String> dates,
            Set<String> dimensions, Set<String> metrics) {
        String tb = tbSrc.toLowerCase();
        // guard BEFORE lowercasing: the previous Objects.nonNull(db) check was dead code,
        // since dbSrc.toLowerCase() had already thrown NPE on a null dbSrc
        String db = Objects.isNull(dbSrc) ? "" : dbSrc.toLowerCase();
        DataSourceTable.Builder builder = DataSourceTable.newBuilder(tb);
        for (String date : dates) {
            builder.addField(date.toLowerCase(), SqlTypeName.VARCHAR);
        }
        for (String dim : dimensions) {
            builder.addField(dim.toLowerCase(), SqlTypeName.VARCHAR);
        }
        for (String metric : metrics) {
            builder.addField(metric.toLowerCase(), SqlTypeName.BIGINT);
        }
        DataSourceTable srcTable = builder
                .withRowCount(1)
                .build();
        if (!db.isEmpty()) {
            SchemaPlus schemaPlus = viewSchema.plus().getSubSchema(db);
            if (Objects.isNull(schemaPlus)) {
                // lazily create the sub-schema on first use
                viewSchema.plus().add(db, new AbstractSchema());
                schemaPlus = viewSchema.plus().getSubSchema(db);
            }
            schemaPlus.add(tb, srcTable);
        } else {
            viewSchema.add(tb, srcTable);
        }
    }
}

View File

@@ -0,0 +1,6 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
/**
 * A named element of the semantic model that can be looked up by name.
 */
public interface SemanticItem {

    /** Returns the unique name of this semantic item. */
    // redundant "public" modifier removed — interface members are implicitly public
    String getName();
}

View File

@@ -0,0 +1,94 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import com.google.common.base.Preconditions;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.config.NullCollation;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIntervalLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.validate.SqlConformance;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * SQL dialect for generated semantic SQL: BigQuery-based, backtick
 * identifiers, case-insensitive, MySQL-style "LIMIT offset, count".
 */
public class SemanticSqlDialect extends SqlDialect {

    public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
            .withDatabaseProduct(DatabaseProduct.BIG_QUERY)
            .withLiteralQuoteString("'")
            .withLiteralEscapedQuoteString("''")
            .withIdentifierQuoteString("`")
            .withNullCollation(NullCollation.LOW)
            .withUnquotedCasing(Casing.UNCHANGED)
            .withQuotedCasing(Casing.UNCHANGED)
            .withCaseSensitive(false);

    public static final SqlDialect DEFAULT = new SemanticSqlDialect(DEFAULT_CONTEXT);

    private static final SqlConformance tagTdwSqlConformance = new HeadlessSqlConformance();

    public SemanticSqlDialect(Context context) {
        super(context);
    }

    /** Emits MySQL-style "LIMIT [offset,] fetch" instead of ANSI OFFSET/FETCH. */
    public static void unparseFetchUsingAnsi(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
        Preconditions.checkArgument(fetch != null || offset != null);
        SqlWriter.Frame fetchFrame;
        writer.newlineAndIndent();
        fetchFrame = writer.startList(SqlWriter.FrameTypeEnum.OFFSET);
        writer.keyword("LIMIT");
        boolean hasOffset = false;
        if (offset != null) {
            offset.unparse(writer, -1, -1);
            hasOffset = true;
        }
        if (fetch != null) {
            if (hasOffset) {
                // comma separates offset from fetch: LIMIT offset, fetch
                writer.keyword(",");
            }
            fetch.unparse(writer, -1, -1);
        }
        writer.endList(fetchFrame);
    }

    @Override
    public void quoteStringLiteralUnicode(StringBuilder buf, String val) {
        // NOTE(review): val is not escaped here — embedded single quotes would
        // break the literal; confirm callers only pass pre-sanitized values
        buf.append("'");
        buf.append(val);
        buf.append("'");
    }

    @Override
    public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
        // charset prefix intentionally dropped (supportsCharSet() is false)
        buf.append(literalQuoteString);
        buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
        buf.append(literalEndQuoteString);
    }

    @Override
    public boolean supportsCharSet() {
        return false;
    }

    @Override
    public boolean requiresAliasForFromItems() {
        return true;
    }

    @Override
    public SqlConformance getConformance() {
        // mysql_5
        return tagTdwSqlConformance;
    }

    @Override
    public boolean supportsGroupByWithCube() {
        return true;
    }

    @Override
    public void unparseSqlIntervalLiteral(SqlWriter writer, SqlIntervalLiteral literal, int leftPrec, int rightPrec) {
        // intentionally emits nothing: interval literals are suppressed in this dialect
    }

    @Override
    public void unparseOffsetFetch(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
        unparseFetchUsingAnsi(writer, offset, fetch);
    }
}

View File

@@ -0,0 +1,17 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import java.util.List;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
/**
 * No-op view expander: always returns null, i.e. views are never expanded
 * during planning.
 */
public class ViewExpanderImpl implements RelOptTable.ViewExpander {

    public ViewExpanderImpl() {
    }

    @Override
    public RelRoot expandView(RelDataType rowType, String queryString, List<String> schemaPath,
            List<String> viewPath) {
        // null signals that no expansion is available for the view
        return null;
    }
}

View File

@@ -0,0 +1,10 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
/**
 * An optimization pass applied to a {@link HeadlessSchema} before SQL generation.
 */
public interface Optimization {

    /** Applies this optimization to the given schema (may mutate it). */
    // redundant "public" modifier removed — interface members are implicitly public
    void visit(HeadlessSchema headlessSchema);
}

View File

@@ -0,0 +1,106 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MeasureNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
 * Base class for SQL renderers. Provides lookups into the semantic model
 * (dimensions, measures, metrics, identifiers) and builds {@link MetricNode}s
 * that carry both aggregated and non-aggregated SQL for a metric.
 */
@Data
public abstract class Renderer {

    protected TableView tableView = new TableView();

    public static Optional<Dimension> getDimensionByName(String name, DataSource datasource) {
        return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
    }

    public static Optional<Measure> getMeasureByName(String name, DataSource datasource) {
        return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name)).findFirst();
    }

    public static Optional<Metric> getMetricByName(String name, HeadlessSchema schema) {
        return schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(name))
                .findFirst();
    }

    public static Optional<Identify> getIdentifyByName(String name, DataSource datasource) {
        return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
    }

    /**
     * Builds the MetricNode for {@code metric}: resolves it first as a schema
     * metric (expanding its measures) and otherwise as a raw data source
     * measure. Measures with a constraint get a measure-filter entry.
     */
    public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope,
            HeadlessSchema schema, boolean nonAgg, String alias) throws Exception {
        Optional<Metric> metricOpt = getMetricByName(metric, schema);
        MetricNode metricNode = new MetricNode();
        if (metricOpt.isPresent()) {
            metricNode.setMetric(metricOpt.get());
            for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
                // prefer the data source's own definition of the measure; fall
                // back to the measure object carried by the metric itself
                Measure resolved = getMeasureByName(m.getName(), datasource).orElse(m);
                metricNode.getNonAggNode()
                        .put(resolved.getName(), MeasureNode.buildNonAgg(alias, resolved, scope));
                metricNode.getAggNode()
                        .put(resolved.getName(), MeasureNode.buildAgg(resolved, nonAgg, scope));
                metricNode.getAggFunction().put(resolved.getName(), resolved.getAgg());
                if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
                    metricNode.getMeasureFilter().put(m.getName(), SemanticNode.parse(m.getConstraint(), scope));
                }
            }
            return metricNode;
        }
        Optional<Measure> measure = getMeasureByName(metric, datasource);
        if (measure.isPresent()) {
            metricNode.getNonAggNode()
                    .put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope));
            metricNode.getAggNode().put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope));
            metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
            if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) {
                metricNode.getMeasureFilter()
                        .put(measure.get().getName(), SemanticNode.parse(measure.get().getConstraint(), scope));
            }
        }
        return metricNode;
    }

    /**
     * De-duplicates while preserving encounter order. The previous HashSet
     * implementation returned elements in nondeterministic order.
     */
    public static List<String> uniqList(List<String> list) {
        return list.stream().distinct().collect(Collectors.toList());
    }

    public void setTable(SqlNode table) {
        tableView.setTable(table);
    }

    public SqlNode builder() {
        return tableView.build();
    }

    public SqlNode builderAs(String alias) throws Exception {
        return SemanticNode.buildAs(alias, tableView.build());
    }

    public abstract void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            HeadlessSchema schema, boolean nonAgg) throws Exception;
}

View File

@@ -0,0 +1,14 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
import org.apache.calcite.sql.validate.SqlValidatorImpl;
/**
 * Concrete validator for S2SQL. Exists because calcite's SqlValidatorImpl has
 * a protected constructor; no behavior is overridden.
 */
public class S2SQLSqlValidatorImpl extends SqlValidatorImpl {

    public S2SQLSqlValidatorImpl(SqlOperatorTable opTab, SqlValidatorCatalogReader catalogReader,
            RelDataTypeFactory typeFactory, Config config) {
        super(opTab, catalogReader, typeFactory, config);
    }
}

View File

@@ -0,0 +1,51 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.parser.SqlParserPos;
/**
 * Mutable accumulator for the pieces of a SELECT (projections, group-by
 * dimensions, filters, ordering, paging) that {@link #build()} assembles into
 * a calcite {@link SqlSelect}.
 */
@Data
public class TableView {

    private List<SqlNode> filter = new ArrayList<>();
    private List<SqlNode> dimension = new ArrayList<>();
    private List<SqlNode> measure = new ArrayList<>();
    private SqlNodeList order;
    private SqlNode fetch;
    private SqlNode offset;
    private SqlNode table;
    private String alias;
    private List<String> primary;
    private DataSource dataSource;

    /**
     * Assembles the SELECT: projection is measures followed by dimensions;
     * dimensions also form the GROUP BY (stripped of AS aliases).
     */
    public SqlNode build() {
        // copy instead of mutating `measure` in place — the old measure.addAll(dimension)
        // made build() non-idempotent (a second call duplicated the dimensions)
        List<SqlNode> selectList = new ArrayList<>(measure);
        selectList.addAll(dimension);
        SqlNodeList dimensionNodeList = null;
        if (!dimension.isEmpty()) {
            dimensionNodeList = new SqlNodeList(getGroup(dimension), SqlParserPos.ZERO);
        }
        SqlNodeList filterNodeList = null;
        if (!filter.isEmpty()) {
            filterNodeList = new SqlNodeList(filter, SqlParserPos.ZERO);
        }
        return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(selectList, SqlParserPos.ZERO), table,
                filterNodeList, dimensionNodeList, null, null, null, order, offset, fetch, null);
    }

    // GROUP BY must reference the underlying expression, not its alias,
    // so unwrap "expr AS name" nodes to their first operand
    private List<SqlNode> getGroup(List<SqlNode> sqlNodeList) {
        return sqlNodeList.stream()
                .map(s -> (s.getKind().equals(SqlKind.AS) ? ((SqlBasicCall) s).getOperandList().get(0) : s))
                .collect(
                        Collectors.toList());
    }
}

View File

@@ -0,0 +1,26 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
 * Builds SQL aggregate-function call nodes (e.g. "SUM(col)"), translating the
 * pseudo-function COUNT_DISTINCT into standard "COUNT(DISTINCT col)".
 */
public class AggFunctionNode extends SemanticNode {

    /**
     * Parses "agg(name)" into a SqlNode validated against {@code scope}.
     *
     * @param agg aggregation name, case-insensitive (see {@link AggFunction})
     * @param name column or expression being aggregated
     */
    public static SqlNode build(String agg, String name, SqlValidatorScope scope) throws Exception {
        // COUNT_DISTINCT is not a real SQL function; rewrite as COUNT(DISTINCT name)
        if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
            return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ", scope);
        }
        return parse(agg + " ( " + name + " ) ", scope);
    }

    /** Supported aggregation keywords (redundant "static" dropped — nested enums are implicitly static). */
    public enum AggFunction {
        AVG,
        COUNT_DISTINCT,
        MAX,
        MIN,
        SUM,
        COUNT,
        DISTINCT
    }
}

View File

@@ -0,0 +1,382 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.extend.LateralViewExplodeNode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
@Slf4j
public class DataSourceNode extends SemanticNode {
public static SqlNode build(DataSource datasource, SqlValidatorScope scope) throws Exception {
String sqlTable = "";
if (datasource.getSqlQuery() != null && !datasource.getSqlQuery().isEmpty()) {
sqlTable = datasource.getSqlQuery();
} else if (datasource.getTableQuery() != null && !datasource.getTableQuery().isEmpty()) {
sqlTable = "select * from " + datasource.getTableQuery();
}
if (sqlTable.isEmpty()) {
throw new Exception("DatasourceNode build error [tableSqlNode not found]");
}
SqlNode source = getTable(sqlTable, scope);
addSchema(scope, datasource, source);
return buildAs(datasource.getName(), source);
}
private static void addSchema(SqlValidatorScope scope, DataSource datasource, SqlNode table) throws Exception {
Map<String, String> parseInfo = SemanticNode.getDbTable(table);
if (!parseInfo.isEmpty() && parseInfo.containsKey(Constants.SQL_PARSER_TABLE)) {
Set<String> dateInfo = new HashSet<>();
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
String db = parseInfo.containsKey(Constants.SQL_PARSER_DB) ? parseInfo.get(Constants.SQL_PARSER_DB) : "";
String tb = parseInfo.get(Constants.SQL_PARSER_TABLE);
for (Dimension d : datasource.getDimensions()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(d.getExpr(), scope), scope);
identifiers.stream().forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName());
}
if (parseInfo.containsKey(Constants.SQL_PARSER_FIELD)) {
for (String field : parseInfo.get(Constants.SQL_PARSER_FIELD).split(",")) {
dimensions.add(field);
}
}
for (Identify i : datasource.getIdentifiers()) {
dimensions.add(i.getName());
}
for (Measure m : datasource.getMeasures()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(m.getExpr(), scope), scope);
identifiers.stream().forEach(i -> {
if (!dimensions.contains(i.toString())) {
metrics.add(i.toString());
}
}
);
if (!dimensions.contains(m.getName())) {
metrics.add(m.getName());
}
}
SchemaBuilder.addSourceView(scope.getValidator().getCatalogReader().getRootSchema(), db,
tb, dateInfo,
dimensions, metrics);
}
}
public static SqlNode buildExtend(DataSource datasource, Set<String> exprList,
SqlValidatorScope scope)
throws Exception {
if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope);
}
SqlNode view = new SqlBasicCall(new LateralViewExplodeNode(), Arrays.asList(build(datasource, scope),
new SqlNodeList(getExtendField(exprList, scope), SqlParserPos.ZERO)), SqlParserPos.ZERO);
return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, view);
}
public static List<SqlNode> getExtendField(Set<String> exprList, SqlValidatorScope scope) throws Exception {
List<SqlNode> sqlNodeList = new ArrayList<>();
for (String expr : exprList) {
sqlNodeList.add(parse(expr, scope));
sqlNodeList.add(new SqlDataTypeSpec(
new SqlUserDefinedTypeNameSpec(expr + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, SqlParserPos.ZERO),
SqlParserPos.ZERO));
}
return sqlNodeList;
}
private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope) throws Exception {
SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig());
SqlNode sqlNode = sqlParser.parseQuery();
scope.validateExpr(sqlNode);
return sqlNode;
}
public static String getNames(List<DataSource> dataSourceList) {
return dataSourceList.stream().map(d -> d.getName()).collect(Collectors.joining("_"));
}
public static void getQueryDimensionMeasure(HeadlessSchema schema, MetricReq metricCommand,
Set<String> queryDimension, List<String> measures) {
queryDimension.addAll(metricCommand.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] : d)
.collect(Collectors.toSet()));
Set<String> schemaMetricName = schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
schema.getMetrics().stream().filter(m -> metricCommand.getMetrics().contains(m.getName()))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream().forEach(mm -> measures.add(mm.getName())));
metricCommand.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)).forEach(m -> measures.add(m));
}
public static void mergeQueryFilterDimensionMeasure(HeadlessSchema schema, MetricReq metricCommand,
Set<String> queryDimension, List<String> measures,
SqlValidatorScope scope) throws Exception {
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope), filterConditions);
Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName = schema.getMetrics().stream()
.map(m -> m.getName()).collect(Collectors.toSet());
for (String filterCondition : filterConditions) {
if (schemaMetricName.contains(filterCondition)) {
schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream()
.forEach(mm -> queryMeasures.add(mm.getName())));
continue;
}
queryDimension.add(filterCondition);
}
measures.clear();
measures.addAll(queryMeasures);
}
}
    /**
     * Selects the datasources needed to answer a metric query.
     *
     * <p>Strategy: (1) pick as base the datasource covering the most requested
     * measures; (2) if the base covers every requested dimension and measure,
     * return it alone; (3) otherwise link further datasources, first via explicit
     * join relations, then by shared identifiers.
     *
     * @throws Exception when the base has no identifiers or no linkable
     *         datasource covers the remaining dimensions/measures
     */
    public static List<DataSource> getMatchDataSources(SqlValidatorScope scope, HeadlessSchema schema,
                                                       MetricReq metricCommand) throws Exception {
        List<DataSource> dataSources = new ArrayList<>();
        // Collect the dimensions and measures the query actually needs.
        List<String> measures = new ArrayList<>();
        Set<String> queryDimension = new HashSet<>();
        getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures);
        DataSource baseDataSource = null;
        // Step 1: score each datasource by how many requested measures it covers.
        Map<String, Integer> dataSourceMeasures = new HashMap<>();
        for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) {
            Set<String> sourceMeasure = entry.getValue().getMeasures().stream().map(mm -> mm.getName())
                    .collect(Collectors.toSet());
            sourceMeasure.retainAll(measures);
            dataSourceMeasures.put(entry.getKey(), sourceMeasure.size());
        }
        log.info("dataSourceMeasures [{}]", dataSourceMeasures);
        Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream()
                .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
        if (base.isPresent()) {
            baseDataSource = schema.getDatasource().get(base.get().getKey());
            dataSources.add(baseDataSource);
        }
        // Step 2: check whether the base alone covers all requested dimensions and measures.
        if (baseDataSource != null) {
            Set<String> filterMeasure = new HashSet<>();
            Set<String> sourceMeasure = baseDataSource.getMeasures().stream().map(mm -> mm.getName())
                    .collect(Collectors.toSet());
            Set<String> dimension = baseDataSource.getDimensions().stream().map(dd -> dd.getName())
                    .collect(Collectors.toSet());
            baseDataSource.getIdentifiers().stream().forEach(i -> dimension.add(i.getName()));
            if (schema.getDimension().containsKey(baseDataSource.getName())) {
                schema.getDimension().get(baseDataSource.getName()).stream().forEach(d -> dimension.add(d.getName()));
            }
            filterMeasure.addAll(sourceMeasure);
            filterMeasure.addAll(dimension);
            // Fold WHERE-clause fields into the requested dimensions/measures first.
            mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, scope);
            // NOTE: checkMatch mutates queryDimension/measures, leaving only the
            // names the base does NOT cover — the link lookups below rely on that.
            boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope);
            if (isAllMatch) {
                log.info("baseDataSource match all ");
                return dataSources;
            }
            // Step 3a: link datasources reachable through explicit join relations.
            List<DataSource> linkDataSources = getLinkDataSourcesByJoinRelation(queryDimension, measures,
                    baseDataSource, schema);
            if (CollectionUtils.isEmpty(linkDataSources)) {
                // Step 3b: fall back to datasources sharing an identifier with the base.
                log.info("baseDataSource get by identifiers ");
                Set<String> baseIdentifiers = baseDataSource.getIdentifiers().stream().map(i -> i.getName())
                        .collect(Collectors.toSet());
                if (baseIdentifiers.isEmpty()) {
                    throw new Exception("datasource error : " + baseDataSource.getName() + " miss identifier");
                }
                linkDataSources = getLinkDataSources(baseIdentifiers, queryDimension, measures,
                        baseDataSource, schema);
                if (linkDataSources.isEmpty()) {
                    throw new Exception(
                            String.format("not find the match datasource : dimension[%s],measure[%s]", queryDimension,
                                    measures));
                }
            }
            log.debug("linkDataSources {}", linkDataSources);
            return linkDataSources;
        }
        return dataSources;
    }
private static boolean checkMatch(Set<String> sourceMeasure,
Set<String> queryDimension,
List<String> measures,
Set<String> dimension,
MetricReq metricCommand,
SqlValidatorScope scope) throws Exception {
boolean isAllMatch = true;
sourceMeasure.retainAll(measures);
if (sourceMeasure.size() < measures.size()) {
log.info("baseDataSource not match all measure");
isAllMatch = false;
}
measures.removeAll(sourceMeasure);
dimension.retainAll(queryDimension);
if (dimension.size() < queryDimension.size()) {
log.info("baseDataSource not match all dimension");
isAllMatch = false;
}
queryDimension.removeAll(dimension);
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
Set<String> whereFields = new HashSet<>();
SqlNode sqlNode = parse(metricCommand.getWhere(), scope);
FilterNode.getFilterField(sqlNode, whereFields);
}
return isAllMatch;
}
    /**
     * Walks the schema's explicit join relations outward from the base datasource,
     * collecting every joined datasource that covers at least one still-unresolved
     * dimension or measure. Returns the base plus links ordered so that join left
     * sides precede right sides, or an empty list when no relation matches.
     *
     * <p>NOTE(review): the single pass over {@code getJoinRelations()} only reaches
     * relations whose left/right touches an already-visited datasource in iteration
     * order — a relation listed before its predecessor is skipped; confirm the
     * relation list is ordered accordingly.
     */
    private static List<DataSource> getLinkDataSourcesByJoinRelation(Set<String> queryDimension, List<String> measures,
                                                                     DataSource baseDataSource, HeadlessSchema schema) {
        Set<String> linkDataSourceName = new HashSet<>();
        List<DataSource> linkDataSources = new ArrayList<>();
        Set<String> before = new HashSet<>();
        before.add(baseDataSource.getName());
        if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
            for (JoinRelation joinRelation : schema.getJoinRelations()) {
                // Only consider relations connected to a datasource reached so far.
                if (!before.contains(joinRelation.getLeft()) && !before.contains(joinRelation.getRight())) {
                    continue;
                }
                boolean isMatch = false;
                boolean isRight = before.contains(joinRelation.getLeft());
                DataSource other = isRight ? schema.getDatasource().get(joinRelation.getRight())
                        : schema.getDatasource().get(joinRelation.getLeft());
                // The candidate matches if it covers any unresolved dimension...
                if (!queryDimension.isEmpty()) {
                    Set<String> linkDimension = other.getDimensions().stream().map(dd -> dd.getName())
                            .collect(Collectors.toSet());
                    other.getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName()));
                    linkDimension.retainAll(queryDimension);
                    if (!linkDimension.isEmpty()) {
                        isMatch = true;
                    }
                }
                // ...or any unresolved measure...
                Set<String> linkMeasure = other.getMeasures().stream().map(mm -> mm.getName())
                        .collect(Collectors.toSet());
                linkMeasure.retainAll(measures);
                if (!linkMeasure.isEmpty()) {
                    isMatch = true;
                }
                // ...or any schema-level extra dimension registered for it.
                if (!isMatch && schema.getDimension().containsKey(other.getName())) {
                    Set<String> linkDimension = schema.getDimension().get(other.getName()).stream()
                            .map(dd -> dd.getName())
                            .collect(Collectors.toSet());
                    linkDimension.retainAll(queryDimension);
                    if (!linkDimension.isEmpty()) {
                        isMatch = true;
                    }
                }
                if (isMatch) {
                    linkDataSourceName.add(other.getName());
                    before.add(other.getName());
                }
            }
        }
        if (!CollectionUtils.isEmpty(linkDataSourceName)) {
            // Order the result so join left sides come before right sides (base first).
            Map<String, Long> orders = new HashMap<>();
            linkDataSourceName.add(baseDataSource.getName());
            orders.put(baseDataSource.getName(), 0L);
            for (JoinRelation joinRelation : schema.getJoinRelations()) {
                if (linkDataSourceName.contains(joinRelation.getLeft()) && linkDataSourceName.contains(
                        joinRelation.getRight())) {
                    orders.put(joinRelation.getLeft(), 0L);
                    orders.put(joinRelation.getRight(), 1L);
                }
            }
            orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> {
                linkDataSources.add(schema.getDatasource().get(d.getKey()));
            });
        }
        return linkDataSources;
    }
private static List<DataSource> getLinkDataSources(Set<String> baseIdentifiers,
Set<String> queryDimension,
List<String> measures,
DataSource baseDataSource,
HeadlessSchema schema) {
Set<String> linkDataSourceName = new HashSet<>();
List<DataSource> linkDataSources = new ArrayList<>();
for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) {
if (entry.getKey().equalsIgnoreCase(baseDataSource.getName())) {
continue;
}
Long identifierNum = entry.getValue().getIdentifiers().stream().map(i -> i.getName())
.filter(i -> baseIdentifiers.contains(i)).count();
if (identifierNum > 0) {
boolean isMatch = false;
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
entry.getValue().getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
isMatch = true;
}
}
if (!measures.isEmpty()) {
Set<String> linkMeasure = entry.getValue().getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
linkMeasure.retainAll(measures);
if (!linkMeasure.isEmpty()) {
isMatch = true;
}
}
if (isMatch) {
linkDataSourceName.add(entry.getKey());
}
}
}
for (Map.Entry<String, List<Dimension>> entry : schema.getDimension().entrySet()) {
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
linkDataSourceName.add(entry.getKey());
}
}
}
for (String linkName : linkDataSourceName) {
linkDataSources.add(schema.getDatasource().get(linkName));
}
if (!CollectionUtils.isEmpty(linkDataSources)) {
List<DataSource> all = new ArrayList<>();
all.add(baseDataSource);
all.addAll(linkDataSources);
return all;
}
return Lists.newArrayList();
}
}

View File

@@ -0,0 +1,65 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
 * Builds Calcite {@link SqlNode}s for semantic-model dimensions: plain
 * "expr AS name" projections, name-only references, and the array variants
 * used by lateral-view explosion.
 */
public class DimensionNode extends SemanticNode {

    /** Builds "expr AS name" for the dimension. */
    public static SqlNode build(Dimension dimension, SqlValidatorScope scope) throws Exception {
        SqlNode sqlNode = parse(dimension.getExpr(), scope);
        return buildAs(dimension.getName(), sqlNode);
    }

    /** Returns the identifiers referenced by the dimension's expression. */
    public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope) throws Exception {
        SqlNode sqlNode = parse(dimension.getExpr(), scope);
        return expand(sqlNode, scope);
    }

    /** Parses the dimension's name as an identifier. */
    public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope) throws Exception {
        return parse(dimension.getName(), scope);
    }

    /** Parses the dimension's raw expression, without aliasing. */
    public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope) throws Exception {
        return parse(dimension.getExpr(), scope);
    }

    /** Builds "name AS alias"; with an empty alias, just the bare name. */
    public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope) throws Exception {
        if ("".equals(alias)) {
            return buildName(dimension, scope);
        }
        SqlNode sqlNode = parse(dimension.getName(), scope);
        return buildAs(alias, sqlNode);
    }

    /**
     * Builds the dimension node, appending the array suffix to the expression for
     * array-typed dimensions. Array dimensions must be bare identifiers.
     */
    public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope) throws Exception {
        if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
            SqlNode sqlNode = parse(dimension.getExpr(), scope);
            if (isIdentifier(sqlNode)) {
                return buildAs(dimension.getName(),
                        parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope));
            }
            throw new Exception("array dimension expr should only identify");
        }
        return build(dimension, scope);
    }

    /**
     * Array-aware variant of {@link #expand}: an array dimension yields a single
     * suffixed "expr AS name" node, any other dimension expands normally.
     */
    public static List<SqlNode> expandArray(Dimension dimension, SqlValidatorScope scope)
            throws Exception {
        // Guard against a null data type, consistent with buildArray above
        // (the original dereferenced getDataType() unconditionally and could NPE).
        if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
            SqlNode sqlNode = parse(dimension.getExpr(), scope);
            if (isIdentifier(sqlNode)) {
                return Arrays.asList(buildAs(dimension.getName(),
                        parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope)));
            }
            throw new Exception("array dimension expr should only identify");
        }
        return expand(dimension, scope);
    }
}

View File

@@ -0,0 +1,35 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlInternalOperator;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.SqlWriter.Frame;
import org.apache.calcite.sql.SqlWriter.FrameTypeEnum;
public class ExtendNode extends SqlInternalOperator {
public ExtendNode() {
super(SqlKind.EXTEND.lowerName, SqlKind.EXTEND);
}
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOperator operator = call.getOperator();
Frame frame = writer.startList(FrameTypeEnum.SIMPLE);
call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
writer.setNeedWhitespace(true);
writer.sep(operator.getName());
SqlNodeList list = (SqlNodeList) call.operand(1);
Frame frameArgs = writer.startList("(", ")");
for (int i = 0; i < list.size(); i++) {
list.get(i).unparse(writer, 0, 0);
if (i < list.size() - 1) {
writer.sep(",");
}
}
writer.endList(frameArgs);
writer.endList(frame);
}
}

View File

@@ -0,0 +1,27 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import java.util.Set;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
public class FilterNode extends SemanticNode {

    /**
     * Recursively collects the field names referenced by a filter expression into
     * {@code fields}, lower-cased. Only the FIRST component of each identifier is
     * recorded (e.g. "t.col" contributes "t") — NOTE(review): this assumes filter
     * identifiers are unqualified single names; confirm against callers.
     */
    public static void getFilterField(SqlNode sqlNode, Set<String> fields) {
        if (sqlNode instanceof SqlIdentifier) {
            SqlIdentifier sqlIdentifier = (SqlIdentifier) sqlNode;
            fields.add(sqlIdentifier.names.get(0).toLowerCase());
            return;
        }
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            for (SqlNode operand : sqlBasicCall.getOperandList()) {
                getFilterField(operand, fields);
            }
        }
    }

    // NOTE(review): always returns false and ignores its argument — looks like a
    // stub; confirm whether callers rely on the constant result before changing.
    public static boolean isMatchDataSource(Set<String> measures) {
        return false;
    }
}

View File

@@ -0,0 +1,41 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify.Type;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class IdentifyNode extends SemanticNode {
public static SqlNode build(Identify identify, SqlValidatorScope scope) throws Exception {
return parse(identify.getName(), scope);
}
public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) {
return identifies.stream().filter(i -> type.name().equalsIgnoreCase(i.getType())).map(i -> i.getName())
.collect(Collectors.toSet());
}
public static boolean isForeign(String name, List<Identify> identifies) {
Optional<Identify> identify = identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
if (identify.isPresent()) {
return Type.FOREIGN.name().equalsIgnoreCase(identify.get().getType());
}
return false;
}
public static boolean isPrimary(String name, List<Identify> identifies) {
Optional<Identify> identify = identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
if (identify.isPresent()) {
return Type.PRIMARY.name().equalsIgnoreCase(identify.get().getType());
}
return false;
}
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
@Data
public class JoinNode extends SemanticNode {
private SqlNode join;
private SqlNode on;
private SqlNode left;
private SqlNode right;
}

View File

@@ -0,0 +1,50 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
 * Builds Calcite {@link SqlNode}s for semantic-model measures, with and without
 * their declared aggregation function applied.
 */
public class MeasureNode extends SemanticNode {

    /**
     * Builds the select item for a measure. When the measure declares an
     * aggregator and {@code noAgg} is false, the aggregator wraps the expression
     * (or the bare name when no expression is declared). Expression-backed
     * measures are aliased with the measure name; name-only measures are not.
     */
    public static SqlNode build(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception {
        boolean addAgg = !noAgg && measure.getAgg() != null && !measure.getAgg().isEmpty();
        if (measure.getExpr() == null) {
            if (addAgg) {
                return parse(measure.getAgg() + " ( " + measure.getName() + " ) ", scope);
            }
            return parse(measure.getName(), scope);
        }
        if (addAgg) {
            return buildAs(measure.getName(), parse(measure.getAgg() + " ( " + measure.getExpr() + " ) ", scope));
        }
        return buildAs(measure.getName(), parse(measure.getExpr(), scope));
    }

    /** Builds "expr AS name" without aggregation; {@code alias} qualifies a bare name. */
    public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope) throws Exception {
        return buildAs(measure.getName(), getExpr(measure, alias, scope));
    }

    /** Builds "agg(name) AS name", or the bare name when no aggregator applies. */
    public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception {
        if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
            return parse(measure.getName(), scope);
        }
        return buildAs(measure.getName(), AggFunctionNode.build(measure.getAgg(), measure.getName(), scope));
    }

    /** Builds "aggFunc(name) AS name". */
    public static SqlNode buildAggAs(String aggFunc, String name, SqlValidatorScope scope) throws Exception {
        return buildAs(name, AggFunctionNode.build(aggFunc, name, scope));
    }

    /** Returns the measure's expression, or "[alias.]name" when none is declared. */
    private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope) throws Exception {
        if (measure.getExpr() == null) {
            return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope);
        }
        return parse(measure.getExpr(), scope);
    }
}

View File

@@ -0,0 +1,29 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import java.util.HashMap;
import java.util.Map;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@Data
public class MetricNode extends SemanticNode {
private Metric metric;
private Map<String, SqlNode> aggNode = new HashMap<>();
private Map<String, SqlNode> nonAggNode = new HashMap<>();
private Map<String, SqlNode> measureFilter = new HashMap<>();
private Map<String, String> aggFunction = new HashMap<>();
public static SqlNode build(Metric metric, SqlValidatorScope scope) throws Exception {
if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null
|| metric.getMetricTypeParams().getExpr().isEmpty()) {
return parse(metric.getName(), scope);
}
SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope);
return buildAs(metric.getName(), sqlNode);
}
}

View File

@@ -0,0 +1,347 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.UnaryOperator;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.sql.JoinType;
import org.apache.calcite.sql.SqlAsOperator;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlBinaryOperator;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlJoin;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.SqlWith;
import org.apache.calcite.sql.SqlWriterConfig;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.pretty.SqlPrettyWriter;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.commons.lang3.StringUtils;
/**
 * Base class for semantic SqlNode builders. Provides expression parsing and
 * validation, AS-aliasing, SQL rendering, identifier expansion, and a
 * lightweight visitor that extracts the table/db/field names from a query tree.
 */
public abstract class SemanticNode {

    // SqlKinds that may denote an aggregate call; OTHER_FUNCTION is included so
    // that function-style calls (e.g. "sum(x)") are additionally checked by name.
    public static Set<SqlKind> AGGREGATION_KIND = new HashSet<>();
    // Lower-case names of the aggregate functions recognized by fieldVisit.
    public static Set<String> AGGREGATION_FUNC = new HashSet<>();

    static {
        AGGREGATION_KIND.add(SqlKind.AVG);
        AGGREGATION_KIND.add(SqlKind.COUNT);
        AGGREGATION_KIND.add(SqlKind.SUM);
        AGGREGATION_KIND.add(SqlKind.MAX);
        AGGREGATION_KIND.add(SqlKind.MIN);
        AGGREGATION_KIND.add(SqlKind.OTHER_FUNCTION); // covers named function calls
        AGGREGATION_FUNC.add("sum");
        AGGREGATION_FUNC.add("count");
        AGGREGATION_FUNC.add("max");
        AGGREGATION_FUNC.add("avg");
        AGGREGATION_FUNC.add("min");
    }

    /** Parses a scalar expression string and validates it against the given scope. */
    public static SqlNode parse(String expression, SqlValidatorScope scope) throws Exception {
        SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig());
        SqlNode sqlNode = sqlParser.parseExpression();
        scope.validateExpr(sqlNode);
        return sqlNode;
    }

    /** Wraps {@code sqlNode} in "sqlNode AS asName". */
    public static SqlNode buildAs(String asName, SqlNode sqlNode) throws Exception {
        SqlAsOperator sqlAsOperator = new SqlAsOperator();
        SqlIdentifier sqlIdentifier = new SqlIdentifier(asName, SqlParserPos.ZERO);
        return new SqlBasicCall(sqlAsOperator, new ArrayList<>(Arrays.asList(sqlNode, sqlIdentifier)),
                SqlParserPos.ZERO);
    }

    /** Renders a SqlNode as single-line SQL text in the semantic dialect, keywords lower-cased. */
    public static String getSql(SqlNode sqlNode) {
        SqlWriterConfig config = SqlPrettyWriter.config().withDialect(SemanticSqlDialect.DEFAULT)
                .withKeywordsLowerCase(true).withClauseEndsLine(true).withAlwaysUseParentheses(false)
                .withSelectListItemsOnSeparateLines(false).withUpdateSetListNewline(false).withIndentation(0);
        UnaryOperator<SqlWriterConfig> sqlWriterConfigUnaryOperator = (c) -> config;
        return sqlNode.toSqlString(sqlWriterConfigUnaryOperator).getSql();
    }

    /** True when the expression consists only of digits (delegates to commons-lang). */
    public static boolean isNumeric(String expr) {
        return StringUtils.isNumeric(expr);
    }

    /** Returns the identifiers referenced by {@code sqlNode}; a bare identifier yields itself. */
    public static List<SqlNode> expand(SqlNode sqlNode, SqlValidatorScope scope) throws Exception {
        if (!isIdentifier(sqlNode)) {
            List<SqlNode> sqlNodeList = new ArrayList<>();
            expand(sqlNode, sqlNodeList);
            return sqlNodeList;
        }
        return new ArrayList<>(Arrays.asList(sqlNode));
    }

    /** Recursively collects SqlIdentifier leaves of a call tree into {@code sqlNodeList}. */
    public static void expand(SqlNode sqlNode, List<SqlNode> sqlNodeList) {
        if (sqlNode instanceof SqlIdentifier) {
            sqlNodeList.add(sqlNode);
            return;
        }
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            for (SqlNode sqlNo : sqlBasicCall.getOperandList()) {
                expand(sqlNo, sqlNodeList);
            }
        }
    }

    /** True when the node is a bare identifier. */
    public static boolean isIdentifier(SqlNode sqlNode) {
        return sqlNode instanceof SqlIdentifier;
    }

    /**
     * Returns the alias of an "x AS alias" call, the node itself for a bare
     * identifier, or null for anything else.
     */
    public static SqlNode getAlias(SqlNode sqlNode, SqlValidatorScope scope) throws Exception {
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            if (sqlBasicCall.getKind().equals(SqlKind.AS) && sqlBasicCall.getOperandList().size() > 1) {
                return sqlBasicCall.getOperandList().get(1);
            }
        }
        if (sqlNode instanceof SqlIdentifier) {
            return sqlNode;
        }
        return null;
    }

    /**
     * Returns the output names of a SELECT (identifier name or AS alias; "" for
     * other item shapes); empty set when the node does not wrap a SELECT.
     */
    public static Set<String> getSelect(SqlNode sqlNode) {
        SqlNode table = getTable(sqlNode);
        if (table instanceof SqlSelect) {
            SqlSelect tableSelect = (SqlSelect) table;
            return tableSelect.getSelectList().stream()
                    .map(s -> (s instanceof SqlIdentifier) ? ((SqlIdentifier) s).names.get(0)
                            : (((s instanceof SqlBasicCall) && s.getKind().equals(SqlKind.AS))
                            ? ((SqlBasicCall) s).getOperandList().get(1).toString() : ""))
                    .collect(Collectors.toSet());
        }
        return new HashSet<>();
    }

    /** Unwraps a "SELECT ... AS alias" call to the inner SELECT; otherwise returns the node unchanged. */
    public static SqlNode getTable(SqlNode sqlNode) {
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            if (sqlBasicCall.getOperator().getKind().equals(SqlKind.AS)) {
                if (sqlBasicCall.getOperandList().get(0) instanceof SqlSelect) {
                    SqlSelect table = (SqlSelect) sqlBasicCall.getOperandList().get(0);
                    return table;
                }
            }
        }
        return sqlNode;
    }

    /** Dispatches the table/field extraction visitor over query-shaped nodes. */
    private static void sqlVisit(SqlNode sqlNode, Map<String, String> parseInfo) {
        SqlKind kind = sqlNode.getKind();
        switch (kind) {
            case SELECT:
                queryVisit(sqlNode, parseInfo);
                break;
            case AS:
                SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
                sqlVisit(sqlBasicCall.getOperandList().get(0), parseInfo);
                break;
            case JOIN:
                SqlJoin sqlJoin = (SqlJoin) sqlNode;
                sqlVisit(sqlJoin.getLeft(), parseInfo);
                sqlVisit(sqlJoin.getRight(), parseInfo);
                break;
            case UNION:
                ((SqlBasicCall) sqlNode).getOperandList().forEach(node -> {
                    sqlVisit(node, parseInfo);
                });
                break;
            case WITH:
                SqlWith sqlWith = (SqlWith) sqlNode;
                sqlVisit(sqlWith.body, parseInfo);
                break;
            default:
                break;
        }
    }

    /** Visits a SELECT: records its select-list fields, then descends into its FROM. */
    private static void queryVisit(SqlNode select, Map<String, String> parseInfo) {
        if (select == null) {
            return;
        }
        SqlSelect sqlSelect = (SqlSelect) select;
        SqlNodeList selectList = sqlSelect.getSelectList();
        selectList.getList().forEach(list -> {
            fieldVisit(list, parseInfo, "");
        });
        fromVisit(sqlSelect.getFrom(), parseInfo);
    }

    /**
     * Visits one select item, recording the identifiers it references into
     * parseInfo (via addTagField). Recognized aggregate calls recurse with the
     * function name as {@code func}.
     */
    private static void fieldVisit(SqlNode field, Map<String, String> parseInfo, String func) {
        if (field == null) {
            return;
        }
        SqlKind kind = field.getKind();
        // Recognized aggregate call: visit its operands, tagging the function name.
        if (AGGREGATION_KIND.contains(kind)) {
            SqlOperator sqlCall = ((SqlCall) field).getOperator();
            if (AGGREGATION_FUNC.contains(sqlCall.toString().toLowerCase())) {
                List<SqlNode> operandList = ((SqlBasicCall) field).getOperandList();
                for (int i = 0; i < operandList.size(); i++) {
                    fieldVisit(operandList.get(i), parseInfo, sqlCall.toString().toUpperCase());
                }
                return;
            }
        }
        if (kind.equals(SqlKind.IDENTIFIER)) {
            addTagField(field.toString(), parseInfo, func);
            return;
        }
        if (kind.equals(SqlKind.AS)) {
            // Only the aliased expression matters; the alias itself is skipped.
            List<SqlNode> operandList1 = ((SqlBasicCall) field).getOperandList();
            SqlNode left = operandList1.get(0);
            fieldVisit(left, parseInfo, "");
            return;
        }
        if (field instanceof SqlBasicCall) {
            List<SqlNode> operandList = ((SqlBasicCall) field).getOperandList();
            for (int i = 0; i < operandList.size(); i++) {
                fieldVisit(operandList.get(i), parseInfo, "");
            }
        }
        if (field instanceof SqlNodeList) {
            ((SqlNodeList) field).getList().forEach(node -> {
                fieldVisit(node, parseInfo, "");
            });
        }
    }

    /**
     * Splits an expression string into word-like tokens and stores them under
     * SQL_PARSER_FIELD. NOTE(review): each call overwrites the previous field
     * set rather than merging, and {@code func} is unused — confirm intended.
     */
    private static void addTagField(String exp, Map<String, String> parseInfo, String func) {
        Set<String> fields = new HashSet<>();
        for (String f : exp.split("[^\\w]+")) {
            if (Pattern.matches("(?i)[a-z\\d_]+", f)) {
                fields.add(f);
            }
        }
        if (!fields.isEmpty()) {
            parseInfo.put(Constants.SQL_PARSER_FIELD, fields.stream().collect(Collectors.joining(",")));
        }
    }

    /** Visits a FROM clause, recording table names and descending into subqueries/joins. */
    private static void fromVisit(SqlNode from, Map<String, String> parseInfo) {
        SqlKind kind = from.getKind();
        switch (kind) {
            case IDENTIFIER:
                SqlIdentifier sqlIdentifier = (SqlIdentifier) from;
                addTableName(sqlIdentifier.toString(), parseInfo);
                break;
            case AS:
                SqlBasicCall sqlBasicCall = (SqlBasicCall) from;
                SqlNode selectNode1 = sqlBasicCall.getOperandList().get(0);
                if (!SqlKind.UNION.equals(selectNode1.getKind())) {
                    if (!SqlKind.SELECT.equals(selectNode1.getKind())) {
                        addTableName(selectNode1.toString(), parseInfo);
                    }
                }
                sqlVisit(selectNode1, parseInfo);
                break;
            case JOIN:
                SqlJoin sqlJoin = (SqlJoin) from;
                sqlVisit(sqlJoin.getLeft(), parseInfo);
                sqlVisit(sqlJoin.getRight(), parseInfo);
                break;
            case SELECT:
                sqlVisit(from, parseInfo);
                break;
            default:
                break;
        }
    }

    /**
     * Records a "db.table" (or bare table) name into parseInfo.
     * NOTE(review): names containing spaces or without an underscore are silently
     * ignored — the underscore check looks like a project naming-convention
     * heuristic; confirm all physical tables contain "_". The non-null check on
     * split() is redundant (String.split never returns null).
     */
    private static void addTableName(String exp, Map<String, String> parseInfo) {
        if (exp.indexOf(" ") > 0) {
            return;
        }
        if (exp.indexOf("_") > 0) {
            if (exp.split("_").length > 1) {
                String[] dbTb = exp.split("\\.");
                if (Objects.nonNull(dbTb) && dbTb.length > 0) {
                    parseInfo.put(Constants.SQL_PARSER_TABLE, dbTb.length > 1 ? dbTb[1] : dbTb[0]);
                    parseInfo.put(Constants.SQL_PARSER_DB, dbTb.length > 1 ? dbTb[0] : "");
                }
            }
        }
    }

    /**
     * Extracts the db/table/field names referenced by a query tree, keyed by the
     * Constants.SQL_PARSER_* keys.
     */
    public static Map<String, String> getDbTable(SqlNode sqlNode) {
        Map<String, String> parseInfo = new HashMap<>();
        sqlVisit(sqlNode, parseInfo);
        return parseInfo;
    }

    /** Parses, validates and converts a SQL statement to a relational expression. */
    public static RelNode getRelNode(CalciteSchema rootSchema, SqlToRelConverter sqlToRelConverter, String sql)
            throws SqlParseException {
        SqlValidator sqlValidator = Configuration.getSqlValidator(rootSchema);
        return sqlToRelConverter.convertQuery(
                sqlValidator.validate(SqlParser.create(sql, SqlParser.Config.DEFAULT).parseStmt()), false, true).rel;
    }

    /** Maps a comparison operator string to its Calcite operator; unknown strings default to EQUALS. */
    public static SqlBinaryOperator getBinaryOperator(String val) {
        if (val.equals("=")) {
            return SqlStdOperatorTable.EQUALS;
        }
        if (val.equals(">")) {
            return SqlStdOperatorTable.GREATER_THAN;
        }
        if (val.equals(">=")) {
            return SqlStdOperatorTable.GREATER_THAN_OR_EQUAL;
        }
        if (val.equals("<")) {
            return SqlStdOperatorTable.LESS_THAN;
        }
        if (val.equals("<=")) {
            return SqlStdOperatorTable.LESS_THAN_OR_EQUAL;
        }
        if (val.equals("!=")) {
            return SqlStdOperatorTable.NOT_EQUALS;
        }
        return SqlStdOperatorTable.EQUALS;
    }

    /**
     * Maps a join-type string (substring match, case-insensitive) to its Calcite
     * symbol; null/empty/unknown strings default to INNER.
     */
    public static SqlLiteral getJoinSqlLiteral(String joinType) {
        if (Objects.nonNull(joinType) && !joinType.isEmpty()) {
            if (joinType.toLowerCase().contains(JoinType.INNER.lowerName)) {
                return SqlLiteral.createSymbol(JoinType.INNER, SqlParserPos.ZERO);
            }
            if (joinType.toLowerCase().contains(JoinType.LEFT.lowerName)) {
                return SqlLiteral.createSymbol(JoinType.LEFT, SqlParserPos.ZERO);
            }
            if (joinType.toLowerCase().contains(JoinType.RIGHT.lowerName)) {
                return SqlLiteral.createSymbol(JoinType.RIGHT, SqlParserPos.ZERO);
            }
            if (joinType.toLowerCase().contains(JoinType.FULL.lowerName)) {
                return SqlLiteral.createSymbol(JoinType.FULL, SqlParserPos.ZERO);
            }
        }
        return SqlLiteral.createSymbol(JoinType.INNER, SqlParserPos.ZERO);
    }
}

View File

@@ -0,0 +1,61 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node.extend;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.ExtendNode;
import java.util.Iterator;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
/**
 * Unparses a two-operand call into Hive-style
 * {@code SELECT * FROM <src> LATERAL VIEW explode(<expr>) tmp_sgl_<expr> [AS <alias>] ...} SQL.
 * Operand 0 is the source table expression; operand 1 is a {@link SqlNodeList} whose
 * entries alternate between an expression to explode (even index) and its alias (odd index).
 */
public class LateralViewExplodeNode extends ExtendNode {

    public final String sqlNameView = "view";
    public final String sqlNameExplode = "explode";

    public LateralViewExplodeNode() {
        super();
    }

    /**
     * Writes the LATERAL VIEW form of {@code call} to {@code writer}.
     *
     * @param writer    target SQL writer
     * @param call      call with exactly two operands (source, expr/alias list)
     * @param leftPrec  left precedence for unparsing the source operand
     * @param rightPrec right precedence (unused beyond the operator's own precedence)
     */
    public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
        SqlOperator operator = call.getOperator();
        writer.setNeedWhitespace(true);
        assert call.operandCount() == 2;
        writer.sep(SqlKind.SELECT.lowerName);
        writer.sep(SqlIdentifier.STAR.toString());
        writer.sep("from");
        SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.SIMPLE);
        call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
        writer.setNeedWhitespace(true);
        writer.sep(SqlKind.LATERAL.lowerName);
        writer.sep(sqlNameView);
        SqlNodeList list = (SqlNodeList) call.operand(1);
        // Entries alternate: even index -> expression to explode, odd index -> its alias.
        // (Generics + enhanced-for replace the previous raw Ord/Iterator loop.)
        for (Ord<SqlNode> node : Ord.zip(list)) {
            if (node.i > 0 && node.i % 2 > 0) {
                // Odd position: emit "AS <alias>" for the preceding explode.
                writer.sep(SqlKind.AS.lowerName);
                node.e.unparse(writer, 0, 0);
                continue;
            }
            if (node.i > 0 && node.i % 2 == 0) {
                // Every additional expression opens a new "LATERAL VIEW" clause.
                writer.sep(SqlKind.LATERAL.lowerName);
                writer.sep(sqlNameView);
            }
            explode(writer, node.e);
        }
        writer.endList(frame);
    }

    /**
     * Emits {@code explode(<sqlNode>) tmp_sgl_<sqlNode>} — the explode call plus a
     * generated table alias derived from the expression's own text.
     */
    public void explode(SqlWriter writer, SqlNode sqlNode) {
        writer.sep(sqlNameExplode);
        SqlWriter.Frame frame = writer.startList("(", ")");
        sqlNode.unparse(writer, 0, 0);
        writer.endList(frame);
        writer.sep("tmp_sgl_" + sqlNode.toString());
    }
}

View File

@@ -0,0 +1,100 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelRule;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.rules.FilterTableScanRule;
import org.apache.calcite.rel.rules.FilterTableScanRule.Config;
import org.apache.calcite.rel.rules.TransformationRule;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Pair;
import org.apache.commons.lang3.tuple.Triple;
public class FilterToGroupScanRule extends RelRule<Config>
implements TransformationRule {
public static FilterTableScanRule.Config DEFAULT = FilterTableScanRule.Config.DEFAULT.withOperandSupplier((b0) -> {
return b0.operand(LogicalFilter.class).oneInput((b1) -> {
return b1.operand(LogicalProject.class).oneInput((b2) -> {
return b2.operand(LogicalAggregate.class).oneInput((b3) -> {
return b3.operand(LogicalProject.class).anyInputs();
});
});
});
}).as(FilterTableScanRule.Config.class);
private HeadlessSchema headlessSchema;
public FilterToGroupScanRule(FilterTableScanRule.Config config, HeadlessSchema headlessSchema) {
super(config);
this.headlessSchema = headlessSchema;
}
public void onMatch(RelOptRuleCall call) {
if (call.rels.length != 4) {
return;
}
if (Objects.isNull(headlessSchema.getRuntimeOptions()) || Objects.isNull(
headlessSchema.getRuntimeOptions().getMinMaxTime()) || headlessSchema.getRuntimeOptions()
.getMinMaxTime().getLeft().isEmpty()) {
return;
}
Triple<String, String, String> minMax = headlessSchema.getRuntimeOptions().getMinMaxTime();
Filter filter = (Filter) call.rel(0);
Project project0 = (Project) call.rel(1);
Project project1 = (Project) call.rel(3);
Aggregate logicalAggregate = (Aggregate) call.rel(2);
Optional<Pair<RexNode, String>> isIn = project1.getNamedProjects()
.stream().filter(i -> i.right.equalsIgnoreCase(minMax.getLeft())).findFirst();
if (!isIn.isPresent()) {
return;
}
RelBuilder relBuilder = call.builder();
relBuilder.push(project1);
RexNode addPartitionCondition = getRexNodeByTimeRange(relBuilder, minMax.getLeft(), minMax.getMiddle(),
minMax.getRight());
relBuilder.filter(new RexNode[]{addPartitionCondition});
relBuilder.project(project1.getProjects());
ImmutableBitSet newGroupSet = logicalAggregate.getGroupSet();
int newGroupCount = newGroupSet.cardinality();
int groupCount = logicalAggregate.getGroupCount();
List<AggregateCall> newAggCalls = new ArrayList();
Iterator var = logicalAggregate.getAggCallList().iterator();
while (var.hasNext()) {
AggregateCall aggCall = (AggregateCall) var.next();
newAggCalls.add(
aggCall.adaptTo(project1, aggCall.getArgList(), aggCall.filterArg, groupCount, newGroupCount));
}
relBuilder.aggregate(relBuilder.groupKey(newGroupSet), newAggCalls);
relBuilder.project(project0.getProjects());
relBuilder.filter(new RexNode[]{filter.getCondition()});
call.transformTo(relBuilder.build());
}
private RexNode getRexNodeByTimeRange(RelBuilder relBuilder, String dateField, String start, String end) {
return relBuilder.call(SqlStdOperatorTable.AND,
relBuilder.call(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, relBuilder.field(dateField),
relBuilder.literal(start)),
relBuilder.call(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, relBuilder.field(dateField),
relBuilder.literal(end)));
}
}

View File

@@ -0,0 +1,68 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
 * Renderer that applies the request-level WHERE clause on top of the current table view.
 * It projects the requested dimensions and metrics (plus any fields the filter itself
 * references), then, when a filter exists, wraps the view in an outer SELECT * with
 * the parsed filter attached.
 */
public class FilterRender extends Renderer {

    @Override
    public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            HeadlessSchema schema, boolean nonAgg) throws Exception {
        TableView baseView = super.tableView;
        List<String> selectMetrics = new ArrayList<>(metricCommand.getMetrics());
        List<String> selectDimensions = new ArrayList<>(metricCommand.getDimensions());
        SqlNode whereNode = null;
        String where = metricCommand.getWhere();
        if (where != null && !where.isEmpty()) {
            whereNode = SemanticNode.parse(where, scope);
            // Fields referenced in the filter must also be projected by the inner view.
            Set<String> whereFields = new HashSet<>();
            FilterNode.getFilterField(whereNode, whereFields);
            List<String> fieldWhere = new ArrayList<>(whereFields);
            Set<String> extraDimensions = new HashSet<>();
            Set<String> extraMetrics = new HashSet<>();
            for (DataSource dataSource : dataSources) {
                SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(), metricCommand.getDimensions(),
                        dataSource, schema, extraDimensions, extraMetrics);
            }
            selectMetrics.addAll(extraMetrics);
            selectDimensions.addAll(extraDimensions);
        }
        for (String dimension : selectDimensions) {
            baseView.getMeasure().add(SemanticNode.parse(dimension, scope));
        }
        for (String metric : selectMetrics) {
            // Known metrics render through their metric definition; unknown names parse as-is.
            Optional<Metric> metricOpt = Renderer.getMetricByName(metric, schema);
            SqlNode metricNode = metricOpt.isPresent()
                    ? MetricNode.build(metricOpt.get(), scope)
                    : SemanticNode.parse(metric, scope);
            baseView.getMeasure().add(metricNode);
        }
        if (whereNode != null) {
            // Wrap the projected view in "SELECT * FROM (...) WHERE <filter>".
            TableView filterView = new TableView();
            filterView.setTable(SemanticNode.buildAs(Constants.DATASOURCE_TABLE_FILTER_PREFIX, baseView.build()));
            filterView.getFilter().add(whereNode);
            filterView.getMeasure().add(SqlIdentifier.star(SqlParserPos.ZERO));
            super.tableView = filterView;
        }
    }
}

View File

@@ -0,0 +1,472 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.AggFunctionNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DataSourceNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.IdentifyNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.JoinConditionType;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlJoin;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Renderer that combines multiple data sources into one joined table view.
 * For each data source it builds a sub-view (via SourceRender), chains the views
 * left-to-right with SqlJoin nodes (join type/condition taken from schema join
 * relations when configured, otherwise inferred from shared identifier fields),
 * then wraps the result in an outer view carrying the selected fields.
 */
@Slf4j
public class JoinRender extends Renderer {

    @Override
    public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            HeadlessSchema schema, boolean nonAgg) throws Exception {
        String queryWhere = metricCommand.getWhere();
        //dataSources = getOrderSource(dataSources);
        // Fields referenced by the WHERE clause; each sub-view must project them.
        Set<String> whereFields = new HashSet<>();
        List<String> fieldWhere = new ArrayList<>();
        if (queryWhere != null && !queryWhere.isEmpty()) {
            SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
            FilterNode.getFilterField(sqlNode, whereFields);
            fieldWhere = whereFields.stream().collect(Collectors.toList());
        }
        Set<String> queryAllDimension = new HashSet<>();
        List<String> measures = new ArrayList<>();
        DataSourceNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures);
        // "left" accumulates the join tree built so far; "leftTable" is its rightmost view.
        SqlNode left = null;
        TableView leftTable = null;
        TableView innerView = new TableView();
        TableView filterView = new TableView();
        // Keyed by field name so the same field is only projected once across sources.
        Map<String, SqlNode> innerSelect = new HashMap<>();
        Set<String> filterDimension = new HashSet<>();
        // Maps data source name -> alias of the view that already joined it.
        Map<String, String> beforeSources = new HashMap<>();
        for (int i = 0; i < dataSources.size(); i++) {
            final DataSource dataSource = dataSources.get(i);
            final Set<String> filterDimensions = new HashSet<>();
            final Set<String> filterMetrics = new HashSet<>();
            final List<String> queryDimension = new ArrayList<>();
            final List<String> queryMetrics = new ArrayList<>();
            // Split the WHERE fields into this source's dimensions vs metrics.
            SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataSource, schema, filterDimensions,
                    filterMetrics);
            List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
            reqMetric.addAll(filterMetrics);
            reqMetric = uniqList(reqMetric);
            List<String> reqDimension = new ArrayList<>(metricCommand.getDimensions());
            reqDimension.addAll(filterDimensions);
            reqDimension = uniqList(reqDimension);
            Set<String> sourceMeasure = dataSource.getMeasures().stream().map(mm -> mm.getName())
                    .collect(Collectors.toSet());
            doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataSource, sourceMeasure, scope, schema,
                    nonAgg);
            Set<String> dimension = dataSource.getDimensions().stream().map(dd -> dd.getName())
                    .collect(Collectors.toSet());
            doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataSource, dimension, scope,
                    schema);
            // Identifier fields always join the projection so join conditions can bind.
            List<String> primary = new ArrayList<>();
            for (Identify identify : dataSource.getIdentifiers()) {
                primary.add(identify.getName());
                if (!fieldWhere.contains(identify.getName())) {
                    fieldWhere.add(identify.getName());
                }
            }
            List<String> dataSourceWhere = new ArrayList<>(fieldWhere);
            addZipperField(dataSource, dataSourceWhere);
            TableView tableView = SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension,
                    metricCommand.getWhere(), dataSources.get(i), scope, schema, true);
            log.info("tableView {}", tableView.getTable().toString());
            String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
            tableView.setAlias(alias);
            tableView.setPrimary(primary);
            tableView.setDataSource(dataSource);
            if (left == null) {
                // First source becomes the leftmost side of the join chain.
                leftTable = tableView;
                left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope));
                beforeSources.put(dataSource.getName(), leftTable.getAlias());
                continue;
            }
            left = buildJoin(left, leftTable, tableView, beforeSources, dataSource, schema, scope);
            leftTable = tableView;
            beforeSources.put(dataSource.getName(), tableView.getAlias());
        }
        // Project the collected fields over the join tree, then wrap once more for filtering.
        for (Map.Entry<String, SqlNode> entry : innerSelect.entrySet()) {
            innerView.getMeasure().add(entry.getValue());
        }
        innerView.setTable(left);
        filterView.setTable(SemanticNode.buildAs(Constants.JOIN_TABLE_OUT_PREFIX, innerView.build()));
        if (!filterDimension.isEmpty()) {
            for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) {
                // nonAgg keeps dimensions as plain selects; aggregated mode groups by them.
                if (nonAgg) {
                    filterView.getMeasure().add(SemanticNode.parse(d, scope));
                } else {
                    filterView.getDimension().add(SemanticNode.parse(d, scope));
                }
            }
        }
        super.tableView = filterView;
    }

    /**
     * For each requested metric owned by this data source: projects its measures into
     * the inner select (qualified by the source's join alias) and registers the matching
     * aggregate (or plain, when nonAgg) expression on the outer filter view.
     */
    private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView, List<String> queryMetrics,
            List<String> reqMetrics, DataSource dataSource, Set<String> sourceMeasure, SqlValidatorScope scope,
            HeadlessSchema schema, boolean nonAgg) throws Exception {
        String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
        for (String m : reqMetrics) {
            if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
                MetricNode metricNode = buildMetricNode(m, dataSource, scope, schema, nonAgg, alias);
                if (!metricNode.getNonAggNode().isEmpty()) {
                    for (String measure : metricNode.getNonAggNode().keySet()) {
                        innerSelect.put(measure,
                                SemanticNode.buildAs(measure, SemanticNode.parse(alias + "." + measure, scope)));
                    }
                }
                if (metricNode.getAggFunction() != null && !metricNode.getAggFunction().isEmpty()) {
                    for (Map.Entry<String, String> entry : metricNode.getAggFunction().entrySet()) {
                        if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
                            if (nonAgg) {
                                filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
                                        SemanticNode.parse(entry.getKey(), scope)));
                            } else {
                                filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
                                        AggFunctionNode.build(entry.getValue(), entry.getKey(), scope)));
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * For each requested dimension owned by this data source: projects it into the inner
     * select (handling "identify@@dimension" composite names) and records it so the outer
     * view can select/group by it.
     */
    private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension, List<String> queryDimension,
            List<String> reqDimensions, DataSource dataSource, Set<String> dimension, SqlValidatorScope scope,
            HeadlessSchema schema) throws Exception {
        String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
        for (String d : reqDimensions) {
            if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) {
                if (d.contains(Constants.DIMENSION_IDENTIFY)) {
                    // Composite "<identify><sep><field>": select the field, alias as the composite.
                    String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
                    innerSelect.put(d,
                            SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + identifyDimension[1], scope)));
                } else {
                    innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + d, scope)));
                }
                filterDimension.add(d);
            }
        }
    }

    /** Keeps only the dimensions that were actually requested or referenced by the WHERE clause. */
    private Set<String> getQueryDimension(Set<String> filterDimension, Set<String> queryAllDimension,
            Set<String> whereFields) {
        return filterDimension.stream().filter(d -> queryAllDimension.contains(d) || whereFields.contains(d)).collect(
                Collectors.toSet());
    }

    /**
     * True when metric {@code m} is computable from this source: either all measures of
     * its schema definition exist here, or {@code m} is itself a source measure.
     * Side effect: adds {@code m} to {@code queryMetrics} on match.
     */
    private boolean getMatchMetric(HeadlessSchema schema, Set<String> sourceMeasure, String m,
            List<String> queryMetrics) {
        Optional<Metric> metric = schema.getMetrics().stream().filter(mm -> mm.getName().equalsIgnoreCase(m))
                .findFirst();
        boolean isAdd = false;
        if (metric.isPresent()) {
            Set<String> metricMeasures = metric.get().getMetricTypeParams().getMeasures().stream()
                    .map(me -> me.getName()).collect(Collectors.toSet());
            if (sourceMeasure.containsAll(metricMeasures)) {
                isAdd = true;
            }
        }
        if (sourceMeasure.contains(m)) {
            isAdd = true;
        }
        if (isAdd && !queryMetrics.contains(m)) {
            queryMetrics.add(m);
        }
        return isAdd;
    }

    /**
     * True when dimension {@code d} belongs to this source — as a declared dimension,
     * an identifier, or a schema-level dimension of the source. Composite
     * "identify@@field" names are matched on the field part.
     * Side effect: adds the resolved name to {@code queryDimension} on match.
     */
    private boolean getMatchDimension(HeadlessSchema schema, Set<String> sourceDimension, DataSource dataSource,
            String d, List<String> queryDimension) {
        String oriDimension = d;
        boolean isAdd = false;
        if (d.contains(Constants.DIMENSION_IDENTIFY)) {
            oriDimension = d.split(Constants.DIMENSION_IDENTIFY)[1];
        }
        if (sourceDimension.contains(oriDimension)) {
            isAdd = true;
        }
        for (Identify identify : dataSource.getIdentifiers()) {
            if (identify.getName().equalsIgnoreCase(oriDimension)) {
                isAdd = true;
                break;
            }
        }
        if (schema.getDimension().containsKey(dataSource.getName())) {
            for (Dimension dim : schema.getDimension().get(dataSource.getName())) {
                if (dim.getName().equalsIgnoreCase(oriDimension)) {
                    isAdd = true;
                }
            }
        }
        if (isAdd && !queryDimension.contains(oriDimension)) {
            queryDimension.add(oriDimension);
        }
        return isAdd;
    }

    // NOTE(review): the scope parameter is unused here — presumably kept for signature
    // symmetry with other helpers; confirm before removing.
    private SqlNode getTable(TableView tableView, SqlValidatorScope scope) throws Exception {
        return SemanticNode.getTable(tableView.getTable());
    }

    /**
     * Joins {@code tableView} onto the accumulated tree {@code left}.
     * Condition priority: configured join relation (with its join type) when one matches,
     * otherwise equality on shared identifier fields; a zipper time-range condition is
     * AND-ed in (or used alone) when either side is a zipper table.
     */
    private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, Map<String, String> before,
            DataSource dataSource,
            HeadlessSchema schema, SqlValidatorScope scope)
            throws Exception {
        SqlNode condition = getCondition(leftTable, tableView, dataSource, schema, scope);
        SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
        JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema);
        SqlNode joinRelationCondition = null;
        if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
            sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
            joinRelationCondition = getCondition(matchJoinRelation, scope);
            condition = joinRelationCondition;
        }
        if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataSource().getTimePartType())
                || Materialization.TimePartType.ZIPPER.equals(tableView.getDataSource().getTimePartType())) {
            SqlNode zipperCondition = getZipperCondition(leftTable, tableView, dataSource, schema, scope);
            if (Objects.nonNull(joinRelationCondition)) {
                condition = new SqlBasicCall(
                        SqlStdOperatorTable.AND,
                        new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)),
                        SqlParserPos.ZERO, null);
            } else {
                condition = zipperCondition;
            }
        }
        return new SqlJoin(
                SqlParserPos.ZERO,
                left,
                SqlLiteral.createBoolean(false, SqlParserPos.ZERO),
                sqlLiteral,
                SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)),
                SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO),
                condition
        );
    }

    /**
     * Finds the configured join relation whose right side is this view's data source and
     * whose left side has already been joined; rewrites its condition fields with the
     * concrete view aliases. Returns an empty relation when none matches.
     */
    private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView, HeadlessSchema schema) {
        JoinRelation matchJoinRelation = JoinRelation.builder().build();
        if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
            for (JoinRelation joinRelation : schema.getJoinRelations()) {
                if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataSource().getName())
                        && before.containsKey(joinRelation.getLeft())) {
                    matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
                            .map(r -> Triple.of(before.get(joinRelation.getLeft()) + "." + r.getLeft(),
                                    r.getMiddle(), tableView.getAlias() + "." + r.getRight())).collect(
                                    Collectors.toList()));
                    matchJoinRelation.setJoinType(joinRelation.getJoinType());
                }
            }
        }
        return matchJoinRelation;
    }

    /**
     * Builds the ON condition from a configured join relation: one binary comparison per
     * condition triple (left field, operator, right field), AND-ed together.
     */
    private SqlNode getCondition(JoinRelation joinRelation,
            SqlValidatorScope scope) throws Exception {
        SqlNode condition = null;
        for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
            List<SqlNode> ons = new ArrayList<>();
            ons.add(SemanticNode.parse(con.getLeft(), scope));
            ons.add(SemanticNode.parse(con.getRight(), scope));
            if (Objects.isNull(condition)) {
                condition = new SqlBasicCall(
                        SemanticNode.getBinaryOperator(con.getMiddle()),
                        ons,
                        SqlParserPos.ZERO, null);
                continue;
            }
            SqlNode addCondition = new SqlBasicCall(
                    SemanticNode.getBinaryOperator(con.getMiddle()),
                    ons,
                    SqlParserPos.ZERO, null);
            condition = new SqlBasicCall(
                    SqlStdOperatorTable.AND,
                    new ArrayList<>(Arrays.asList(condition, addCondition)),
                    SqlParserPos.ZERO, null);
        }
        return condition;
    }

    /**
     * Infers the ON condition from fields selected by both sides: equality on each shared
     * dimension, skipping fields that are foreign keys on one side without being primary
     * on the other. Returns null when no shared field qualifies.
     */
    private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, HeadlessSchema schema,
            SqlValidatorScope scope) throws Exception {
        Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
        Set<String> selectRight = SemanticNode.getSelect(right.getTable());
        selectLeft.retainAll(selectRight);
        SqlNode condition = null;
        for (String on : selectLeft) {
            if (!SourceRender.isDimension(on, dataSource, schema)) {
                continue;
            }
            if (IdentifyNode.isForeign(on, left.getDataSource().getIdentifiers())) {
                if (!IdentifyNode.isPrimary(on, right.getDataSource().getIdentifiers())) {
                    continue;
                }
            }
            if (IdentifyNode.isForeign(on, right.getDataSource().getIdentifiers())) {
                if (!IdentifyNode.isPrimary(on, left.getDataSource().getIdentifiers())) {
                    continue;
                }
            }
            List<SqlNode> ons = new ArrayList<>();
            ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope));
            ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope));
            if (condition == null) {
                condition = new SqlBasicCall(
                        SqlStdOperatorTable.EQUALS,
                        ons,
                        SqlParserPos.ZERO, null);
                continue;
            }
            SqlNode addCondition = new SqlBasicCall(
                    SqlStdOperatorTable.EQUALS,
                    ons,
                    SqlParserPos.ZERO, null);
            condition = new SqlBasicCall(
                    SqlStdOperatorTable.AND,
                    new ArrayList<>(Arrays.asList(condition, addCondition)),
                    SqlParserPos.ZERO, null);
        }
        return condition;
    }

    // Depth-first search over the adjacency map "next", backtracking until "orders"
    // holds cnt nodes in a joinable order.
    // NOTE(review): appears to have no caller in this class (related code above is
    // commented out) — candidate for removal; confirm before deleting.
    private static void joinOrder(int cnt, String id, Map<String, Set<String>> next, Queue<String> orders,
            Map<String, Boolean> visited) {
        visited.put(id, true);
        orders.add(id);
        if (orders.size() >= cnt) {
            return;
        }
        for (String nextId : next.get(id)) {
            if (!visited.get(nextId)) {
                joinOrder(cnt, nextId, next, orders, visited);
                if (orders.size() >= cnt) {
                    return;
                }
            }
        }
        orders.poll();
        visited.put(id, false);
    }

    /** Adds a zipper table's start/end time dimensions to the field list if missing. */
    private void addZipperField(DataSource dataSource, List<String> fields) {
        if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) {
            dataSource.getDimensions().stream()
                    .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).forEach(t -> {
                        if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)
                                && !fields.contains(t.getName())
                        ) {
                            fields.add(t.getName());
                        }
                        if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)
                                && !fields.contains(t.getName())
                        ) {
                            fields.add(t.getName());
                        }
                    });
        }
    }

    /**
     * Builds the zipper join condition "start <= partDate AND end > partDate", where the
     * zipper side supplies start/end and the other side supplies the partition date.
     * Exactly one side may be a zipper table.
     * NOTE(review): if the zipper side lacks start/end dimensions, startTime/endTime stay
     * "" and are still passed to SemanticNode.parse — presumably a parse failure; confirm.
     */
    private SqlNode getZipperCondition(TableView left, TableView right, DataSource dataSource, HeadlessSchema schema,
            SqlValidatorScope scope) throws Exception {
        if (Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
                && Materialization.TimePartType.ZIPPER.equals(right.getDataSource().getTimePartType())) {
            throw new Exception("not support two zipper table");
        }
        SqlNode condition = null;
        Optional<Dimension> leftTime = left.getDataSource().getDimensions().stream()
                .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).findFirst();
        Optional<Dimension> rightTime = right.getDataSource().getDimensions().stream()
                .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).findFirst();
        if (leftTime.isPresent() && rightTime.isPresent()) {
            String startTime = "";
            String endTime = "";
            String dateTime = "";
            Optional<Dimension> startTimeOp =
                    (Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType()) ? left : right)
                            .getDataSource().getDimensions().stream()
                            .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                            .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)).findFirst();
            Optional<Dimension> endTimeOp =
                    (Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType()) ? left : right)
                            .getDataSource().getDimensions().stream()
                            .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                            .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)).findFirst();
            if (startTimeOp.isPresent() && endTimeOp.isPresent()) {
                TableView zipper =
                        Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
                                ? left : right;
                TableView partMetric =
                        Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
                                ? right : left;
                Optional<Dimension> partTime =
                        Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
                                ? rightTime : leftTime;
                startTime = zipper.getAlias() + "." + startTimeOp.get().getName();
                endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
                dateTime = partMetric.getAlias() + "." + partTime.get().getName();
            }
            condition =
                    new SqlBasicCall(
                            SqlStdOperatorTable.AND,
                            new ArrayList<SqlNode>(Arrays.asList(new SqlBasicCall(
                                    SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
                                    new ArrayList<SqlNode>(Arrays.asList(SemanticNode.parse(startTime, scope),
                                            SemanticNode.parse(dateTime, scope))),
                                    SqlParserPos.ZERO, null), new SqlBasicCall(
                                    SqlStdOperatorTable.GREATER_THAN,
                                    new ArrayList<SqlNode>(Arrays.asList(SemanticNode.parse(endTime, scope),
                                            SemanticNode.parse(dateTime, scope))),
                                    SqlParserPos.ZERO, null))),
                            SqlParserPos.ZERO, null);
        }
        return condition;
    }
}

View File

@@ -0,0 +1,51 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
/**
 * Final projection stage: adds the requested dimensions and metrics to the current
 * table view, then applies the request's row limit and ORDER BY columns.
 */
public class OutputRender extends Renderer {

    @Override
    public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            HeadlessSchema schema, boolean nonAgg) throws Exception {
        TableView resultView = super.tableView;
        for (String dimension : metricCommand.getDimensions()) {
            resultView.getMeasure().add(SemanticNode.parse(dimension, scope));
        }
        for (String metric : metricCommand.getMetrics()) {
            resultView.getMeasure().add(SemanticNode.parse(metric, scope));
        }
        // NOTE(review): the request limit is stored via setOffset — presumably TableView
        // emits this node as the row limit when building the query; confirm against
        // TableView.build().
        if (metricCommand.getLimit() > 0) {
            resultView.setOffset(SemanticNode.parse(metricCommand.getLimit().toString(), scope));
        }
        if (!CollectionUtils.isEmpty(metricCommand.getOrder())) {
            List<SqlNode> orderNodes = new ArrayList<>();
            for (ColumnOrder columnOrder : metricCommand.getOrder()) {
                SqlNode colNode = SemanticNode.parse(columnOrder.getCol(), scope);
                // DESC columns are wrapped in the DESC call; ascending is the bare column.
                if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
                    orderNodes.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO, new SqlNode[]{colNode}));
                } else {
                    orderNodes.add(colNode);
                }
            }
            resultView.setOrder(new SqlNodeList(orderNodes, SqlParserPos.ZERO));
        }
    }
}

View File

@@ -0,0 +1,338 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DataSourceNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DimensionNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.IdentifyNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
@Slf4j
public class SourceRender extends Renderer {
    /**
     * Renders one data source as a pair of nested views: an inner view ("dataSet")
     * selecting raw measures/dimensions from the physical table, wrapped by an outer
     * view ("output") that applies aggregation and naming. Fields referenced by the
     * WHERE clause are pulled into the projection as needed.
     *
     * @param alias         alias prefix for identify-qualified dimensions ("" for none)
     * @param fieldWheres   fields referenced by the query's WHERE clause
     * @param reqMetrics    metrics requested by the query
     * @param reqDimensions dimensions requested by the query
     * @param queryWhere    raw WHERE text (passed through to mergeWhere via fieldWheres)
     * @param datasource    the data source to render
     * @param scope         Calcite validator scope used for parsing expressions
     * @param schema        semantic schema with metric/dimension definitions
     * @param nonAgg        when true, render without aggregation
     * @return the outer view whose table is the aliased inner view
     * @throws Exception on SQL parse failures
     */
    public static TableView renderOne(String alias, List<String> fieldWheres,
            List<String> reqMetrics, List<String> reqDimensions,
            String queryWhere, DataSource datasource, SqlValidatorScope scope,
            HeadlessSchema schema, boolean nonAgg) throws Exception {
        TableView dataSet = new TableView();
        TableView output = new TableView();
        List<String> queryMetrics = new ArrayList<>(reqMetrics);
        List<String> queryDimensions = new ArrayList<>(reqDimensions);
        List<String> fieldWhere = new ArrayList<>(fieldWheres);
        // Expressions (e.g. array columns) that require a table-extend wrapper.
        Set<String> extendFields = new HashSet<>();
        if (!fieldWhere.isEmpty()) {
            // Classify WHERE fields into this source's dimensions/metrics and fold them in.
            Set<String> dimensions = new HashSet<>();
            Set<String> metrics = new HashSet<>();
            whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema, dimensions, metrics);
            queryMetrics.addAll(metrics);
            queryMetrics = uniqList(queryMetrics);
            queryDimensions.addAll(dimensions);
            queryDimensions = uniqList(queryDimensions);
            mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
                    datasource, scope,
                    schema, nonAgg);
        }
        addTimeDimension(datasource, queryDimensions);
        for (String metric : queryMetrics) {
            // Aggregated form goes to the outer view; raw measures and measure filters
            // stay on the inner view.
            MetricNode metricNode = buildMetricNode(metric, datasource, scope, schema, nonAgg, alias);
            if (!metricNode.getAggNode().isEmpty()) {
                metricNode.getAggNode().entrySet().stream().forEach(m -> output.getMeasure().add(m.getValue()));
            }
            if (metricNode.getNonAggNode() != null) {
                metricNode.getNonAggNode().entrySet().stream().forEach(m -> dataSet.getMeasure().add(m.getValue()));
            }
            if (metricNode.getMeasureFilter() != null) {
                metricNode.getMeasureFilter().entrySet().stream().forEach(m -> dataSet.getFilter().add(m.getValue()));
            }
        }
        for (String dimension : queryDimensions) {
            // Skip "identify@@field" composites whose plain field is also requested.
            if (dimension.contains(Constants.DIMENSION_IDENTIFY) && queryDimensions.contains(
                    dimension.split(Constants.DIMENSION_IDENTIFY)[1])) {
                continue;
            }
            buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
                    dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
                            : dimension, datasource, schema, nonAgg, extendFields, dataSet, output,
                    scope);
        }
        SqlNode tableNode = DataSourceNode.buildExtend(datasource, extendFields, scope);
        dataSet.setTable(tableNode);
        // Alias the inner view with a random 4-char suffix (last 4 chars of a UUID string)
        // so repeated renders of the same source don't collide.
        output.setTable(SemanticNode.buildAs(
                Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName() + "_" + UUID.randomUUID().toString()
                        .substring(32), dataSet.build()));
        return output;
    }
/**
 * Appends one queried dimension to both table views.
 *
 * Resolution order (first match wins):
 *  1) a schema-level dimension registered under this datasource's name,
 *  2) an identifier (entity key) of the datasource,
 *  3) a datasource-local dimension resolved via {@code getDimensionByName}.
 *
 * @param alias        alias prefix carried by the query field, or "" when absent
 * @param dimension    dimension name to resolve (matched case-insensitively)
 * @param datasource   datasource owning identifiers and local dimensions
 * @param schema       semantic schema holding per-datasource dimension definitions
 * @param nonAgg       when true, the field goes into the output view's measure list
 *                     instead of its dimension list
 * @param extendFields collects expressions of array-typed dimensions so the table
 *                     can be extended later (see DataSourceNode.buildExtend)
 * @param dataSet      inner (source) view being built
 * @param output       outer (result) view being built
 * @param scope        Calcite validator scope used to build/parse SqlNodes
 */
private static void buildDimension(String alias, String dimension, DataSource datasource, HeadlessSchema schema,
        boolean nonAgg, Set<String> extendFields, TableView dataSet, TableView output, SqlValidatorScope scope)
        throws Exception {
    List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
    boolean isAdd = false;
    // 1) schema-level dimensions registered for this datasource
    if (!CollectionUtils.isEmpty(dimensionList)) {
        for (Dimension dim : dimensionList) {
            if (!dim.getName().equalsIgnoreCase(dimension)) {
                continue;
            }
            dataSet.getMeasure().add(DimensionNode.build(dim, scope));
            if (nonAgg) {
                //dataSet.getMeasure().addAll(DimensionNode.expand(dim, scope));
                output.getMeasure().add(DimensionNode.buildName(dim, scope));
                isAdd = true;
                // NOTE(review): the nonAgg path keeps scanning remaining dims
                // (continue) while the agg path stops at the first match (break)
                // — presumably intentional; confirm.
                continue;
            }
            if ("".equals(alias)) {
                output.getDimension().add(DimensionNode.buildName(dim, scope));
            } else {
                output.getDimension().add(DimensionNode.buildNameAs(alias, dim, scope));
            }
            isAdd = true;
            break;
        }
    }
    // 2) identifiers (entity keys) may also be queried as dimensions
    if (!isAdd) {
        Optional<Identify> identify = datasource.getIdentifiers().stream()
                .filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
        if (identify.isPresent()) {
            if (nonAgg) {
                dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
                output.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
            } else {
                dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
                output.getDimension().add(SemanticNode.parse(identify.get().getName(), scope));
            }
            isAdd = true;
        }
    }
    if (isAdd) {
        return;
    }
    // 3) fall back to the datasource's own dimension definitions
    Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
    if (dimensionOptional.isPresent()) {
        dataSet.getMeasure().add(DimensionNode.buildArray(dimensionOptional.get(), scope));
        // array-typed dimensions register their expression for later table extension
        if (dimensionOptional.get().getDataType().isArray()) {
            extendFields.add(dimensionOptional.get().getExpr());
        }
        if (nonAgg) {
            output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope));
            return;
        }
        output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope));
    }
}
/**
 * Returns true when any where-clause field name matches a measure of the datasource.
 *
 * @param fields     lower-cased field names referenced by the WHERE clause
 * @param datasource datasource whose measures are checked
 * @return true if at least one field resolves to a measure
 */
private static boolean isWhereHasMetric(List<String> fields, DataSource datasource) {
    // Fix: the original computed the exact same count twice (metricNum/measureNum
    // were copy-paste duplicates over the same getMeasures() stream). A single
    // anyMatch is behaviorally identical and short-circuits on the first hit.
    return datasource.getMeasures().stream()
            .anyMatch(m -> fields.contains(m.getName().toLowerCase()));
}
/**
 * Resolves fields that appear only in the WHERE clause (filtered on but not selected)
 * into SqlNodes for the inner view.
 *
 * Side effect: entries already present in {@code queryMetrics} or
 * {@code queryDimensions} are REMOVED from {@code fields} before resolution.
 *
 * Per remaining field the lookup order is: schema-level dimensions (expanded),
 * identifiers, then datasource dimensions (array-typed ones also register their
 * expression in {@code extendFields}).
 *
 * NOTE(review): the {@code nonAgg} parameter is not used in this method.
 *
 * @return the SqlNodes built for the where-only fields
 */
private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
        List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
        HeadlessSchema schema,
        boolean nonAgg) throws Exception {
    Iterator<String> iterator = fields.iterator();
    List<SqlNode> whereNode = new ArrayList<>();
    // drop fields already covered by the select list (mutates the caller's list)
    while (iterator.hasNext()) {
        String cur = iterator.next();
        if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
            iterator.remove();
        }
    }
    for (String where : fields) {
        List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
        boolean isAdd = false;
        // 1) schema-level dimensions: expand to all their underlying fields
        if (!CollectionUtils.isEmpty(dimensionList)) {
            for (Dimension dim : dimensionList) {
                if (!dim.getName().equalsIgnoreCase(where)) {
                    continue;
                }
                whereNode.addAll(DimensionNode.expand(dim, scope));
                isAdd = true;
            }
        }
        // 2) identifiers (entity keys)
        Optional<Identify> identify = getIdentifyByName(where, datasource);
        if (identify.isPresent()) {
            whereNode.add(IdentifyNode.build(identify.get(), scope));
            isAdd = true;
        }
        if (isAdd) {
            continue;
        }
        // 3) datasource-local dimensions
        Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
        if (dimensionOptional.isPresent()) {
            whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope));
            // array-typed dimensions require the table to be extended later
            if (dimensionOptional.get().getDataType().isArray()) {
                extendFields.add(dimensionOptional.get().getExpr());
            }
        }
    }
    return whereNode;
}
/**
 * Resolves where-only fields (see {@link #getWhereMeasure}) and merges the resulting
 * SqlNodes into the inner view's measure list so they are available for filtering.
 *
 * NOTE(review): {@code outputSet} is currently untouched — the commented-out call
 * below suggests the outer view was once updated here as well; confirm intent.
 */
private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
        List<String> queryMetrics,
        List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
        HeadlessSchema schema,
        boolean nonAgg) throws Exception {
    List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, extendFields, datasource,
            scope, schema,
            nonAgg);
    dataSet.getMeasure().addAll(whereNode);
    //getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
}
/**
 * Classifies each where-clause field that is not already part of the select list
 * as a dimension or a metric, accumulating the original field names into the
 * {@code dimensions} / {@code metrics} output sets.
 */
public static void whereDimMetric(List<String> fields, List<String> queryMetrics,
        List<String> queryDimensions, DataSource datasource, HeadlessSchema schema, Set<String> dimensions,
        Set<String> metrics) {
    for (String candidate : fields) {
        boolean alreadySelected = queryDimensions.contains(candidate) || queryMetrics.contains(candidate);
        if (alreadySelected) {
            continue;
        }
        // strip the identify prefix (if any) before looking the field up
        String lookupName = candidate.contains(Constants.DIMENSION_IDENTIFY)
                ? candidate.split(Constants.DIMENSION_IDENTIFY)[1]
                : candidate;
        addField(lookupName, candidate, datasource, schema, dimensions, metrics);
    }
}
/**
 * Classifies a single where-clause field and records its original (possibly
 * prefix-carrying) name into the matching output set.
 *
 * Lookup order (first hit wins, all matches case-insensitive):
 *  1) datasource dimensions            -> dimensions
 *  2) datasource identifiers           -> dimensions
 *  3) schema-level datasource dims     -> dimensions
 *  4) datasource measures              -> metrics
 *  5) schema metrics whose underlying measures are ALL provided
 *     by this datasource                -> metrics
 *
 * @param field    resolved name used for matching (identify prefix stripped)
 * @param oriField original field name as it appeared in the query
 */
private static void addField(String field, String oriField, DataSource datasource, HeadlessSchema schema,
        Set<String> dimensions,
        Set<String> metrics) {
    Optional<Dimension> dimension = datasource.getDimensions().stream()
            .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
    if (dimension.isPresent()) {
        dimensions.add(oriField);
        return;
    }
    Optional<Identify> identify = datasource.getIdentifiers().stream()
            .filter(i -> i.getName().equalsIgnoreCase(field)).findFirst();
    if (identify.isPresent()) {
        dimensions.add(oriField);
        return;
    }
    if (schema.getDimension().containsKey(datasource.getName())) {
        Optional<Dimension> dataSourceDim = schema.getDimension().get(datasource.getName()).stream()
                .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
        if (dataSourceDim.isPresent()) {
            dimensions.add(oriField);
            return;
        }
    }
    Optional<Measure> metric = datasource.getMeasures()
            .stream().filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
    if (metric.isPresent()) {
        metrics.add(oriField);
        return;
    }
    Optional<Metric> datasourceMetric = schema.getMetrics()
            .stream().filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
    if (datasourceMetric.isPresent()) {
        // a schema metric only counts if this datasource supplies every measure it needs
        Set<String> measures = datasourceMetric.get().getMetricTypeParams().getMeasures().stream()
                .map(m -> m.getName()).collect(
                        Collectors.toSet());
        if (datasource.getMeasures().stream().map(m -> m.getName()).collect(Collectors.toSet())
                .containsAll(measures)) {
            metrics.add(oriField);
            return;
        }
    }
}
/**
 * Whether the given name resolves to a dimension of the datasource: a
 * datasource-local dimension, an identifier, or a schema-level dimension
 * registered under the datasource's name (all matched case-insensitively).
 */
public static boolean isDimension(String name, DataSource datasource, HeadlessSchema schema) {
    boolean ownDimension = datasource.getDimensions().stream()
            .anyMatch(d -> d.getName().equalsIgnoreCase(name));
    if (ownDimension) {
        return true;
    }
    boolean isIdentifier = datasource.getIdentifiers().stream()
            .anyMatch(i -> i.getName().equalsIgnoreCase(name));
    if (isIdentifier) {
        return true;
    }
    if (schema.getDimension().containsKey(datasource.getName())) {
        return schema.getDimension().get(datasource.getName()).stream()
                .anyMatch(d -> d.getName().equalsIgnoreCase(name));
    }
    return false;
}
/**
 * Ensures the datasource's time dimension(s) are present in the queried dimensions.
 * Zipper-partitioned tables contribute both their zipper start- and end-time
 * dimensions; ordinary tables contribute their single time dimension.
 * Mutates {@code queryDimension} in place; existing entries are not duplicated.
 */
private static void addTimeDimension(DataSource dataSource, List<String> queryDimension) {
    if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) {
        // zipper table: locate the start/end time dimensions by name prefix
        Optional<Dimension> startTimeOp = dataSource.getDimensions().stream()
                .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)).findFirst();
        Optional<Dimension> endTimeOp = dataSource.getDimensions().stream()
                .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)).findFirst();
        if (startTimeOp.isPresent() && !queryDimension.contains(startTimeOp.get().getName())) {
            queryDimension.add(startTimeOp.get().getName());
        }
        if (endTimeOp.isPresent() && !queryDimension.contains(endTimeOp.get().getName())) {
            queryDimension.add(endTimeOp.get().getName());
        }
    } else {
        // ordinary table: first time-typed dimension is the partition column
        Optional<Dimension> timeOp = dataSource.getDimensions().stream()
                .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())).findFirst();
        if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) {
            queryDimension.add(timeOp.get().getName());
        }
    }
}
/**
 * Entry point: renders the metric query into this renderer's table view.
 * Parses the raw WHERE expression to collect the referenced field names, then
 * either renders the single datasource directly or delegates to
 * {@link JoinRender} when several datasources must be joined.
 *
 * NOTE(review): writes the result into {@code super.tableView} — the enclosing
 * class apparently extends a base renderer that owns the view; confirm.
 */
public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
        HeadlessSchema schema, boolean nonAgg) throws Exception {
    String queryWhere = metricCommand.getWhere();
    Set<String> whereFields = new HashSet<>();
    List<String> fieldWhere = new ArrayList<>();
    if (queryWhere != null && !queryWhere.isEmpty()) {
        // parse the WHERE expression and pull out every column it references
        SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
        FilterNode.getFilterField(sqlNode, whereFields);
        fieldWhere = whereFields.stream().collect(Collectors.toList());
    }
    if (dataSources.size() == 1) {
        // single-source query: render directly
        DataSource dataSource = dataSources.get(0);
        super.tableView = renderOne("", fieldWhere, metricCommand.getMetrics(),
                metricCommand.getDimensions(),
                metricCommand.getWhere(), dataSource, scope, schema, nonAgg);
        return;
    }
    // multiple sources: delegate to the join renderer
    JoinRender joinRender = new JoinRender();
    joinRender.render(metricCommand, dataSources, scope, schema, nonAgg);
    super.tableView = joinRender.getTableView();
}
}

View File

@@ -0,0 +1,470 @@
package com.tencent.supersonic.headless.core.parser.convert;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.common.core.pojo.MetricTable;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.response.DatabaseResp;
import com.tencent.supersonic.headless.core.parser.HeadlessConverter;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.service.HeadlessQueryEngine;
import com.tencent.supersonic.headless.core.utils.QueryStructUtils;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import com.tencent.supersonic.headless.server.pojo.EngineTypeEnum;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Converts a structured metric query ({@link QueryStructReq}) into an executable SQL
 * command ({@link ParseSqlReq}). Plain aggregations become a single select over a
 * temporary metric table; ratio queries (year-over-year / period-over-period) become a
 * self-join of that table shifted by one time period, rendered per engine dialect.
 */
@Component("CalculateAggConverter")
@Slf4j
public class CalculateAggConverter implements HeadlessConverter {

    // NOTE(review): parserService and metricAggDefault are injected but not referenced
    // in this class; kept for compatibility — confirm whether still needed.
    private final HeadlessQueryEngine parserService;
    private final QueryStructUtils queryStructUtils;
    private final SqlGenerateUtils sqlGenerateUtils;
    private final Catalog catalog;

    /** Default aggregation function when a metric does not specify one. */
    @Value("${metricParser.agg.default:sum}")
    private String metricAggDefault;

    public CalculateAggConverter(
            HeadlessQueryEngine parserService,
            @Lazy QueryStructUtils queryStructUtils,
            SqlGenerateUtils sqlGenerateUtils, Catalog catalog) {
        this.parserService = parserService;
        this.queryStructUtils = queryStructUtils;
        this.sqlGenerateUtils = sqlGenerateUtils;
        this.catalog = catalog;
    }

    /** Strategy for rendering the engine-specific outer SQL of a ratio query. */
    public interface EngineSql {

        String sql(QueryStructReq queryStructCmd, boolean isOver, boolean asWith, String metricSql);
    }

    /**
     * Builds the SQL command for an aggregated query; ratio queries are delegated to
     * {@link #generateRatioSqlCommand}.
     *
     * @throws Exception if the ratio query is invalid (mixed ratio kinds, missing time)
     */
    public ParseSqlReq generateSqlCommend(QueryStructReq queryStructCmd, EngineTypeEnum engineTypeEnum, String version)
            throws Exception {
        // year-over-year / period-over-period ratios need a dedicated self-join query
        if (isRatioAccept(queryStructCmd)) {
            return generateRatioSqlCommand(queryStructCmd, engineTypeEnum, version);
        }
        ParseSqlReq sqlCommand = new ParseSqlReq();
        sqlCommand.setRootPath(catalog.getModelFullPath(queryStructCmd.getModelIds()));
        String metricTableName = "v_metric_tb_tmp";
        MetricTable metricTable = new MetricTable();
        metricTable.setAlias(metricTableName);
        metricTable.setMetrics(queryStructCmd.getMetrics());
        metricTable.setDimensions(queryStructCmd.getGroups());
        String where = queryStructUtils.generateWhere(queryStructCmd);
        log.info("in generateSqlCommand, complete where:{}", where);
        metricTable.setWhere(where);
        metricTable.setAggOption(AggOption.AGGREGATION);
        sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
        String sql = String.format("select %s from %s %s %s %s", sqlGenerateUtils.getSelect(queryStructCmd),
                metricTableName,
                sqlGenerateUtils.getGroupBy(queryStructCmd), sqlGenerateUtils.getOrderBy(queryStructCmd),
                sqlGenerateUtils.getLimit(queryStructCmd));
        if (!queryStructUtils.isSupportWith(engineTypeEnum, version)) {
            // engines without CTE support query the metric table as an aliased sub-select
            sqlCommand.setSupportWith(false);
            sql = String.format("select %s from %s t0 %s %s %s", sqlGenerateUtils.getSelect(queryStructCmd),
                    metricTableName,
                    sqlGenerateUtils.getGroupBy(queryStructCmd), sqlGenerateUtils.getOrderBy(queryStructCmd),
                    sqlGenerateUtils.getLimit(queryStructCmd));
        }
        sqlCommand.setSql(sql);
        return sqlCommand;
    }

    /**
     * Accepts struct (non-S2SQL, non-native) queries whose aggregators all carry a
     * known aggregation function.
     */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        if (Objects.isNull(queryStatement.getQueryStructReq()) || queryStatement.getIsS2SQL()) {
            return false;
        }
        QueryStructReq queryStructCmd = queryStatement.getQueryStructReq();
        if (queryStructCmd.getQueryType().isNativeAggQuery()) {
            return false;
        }
        if (CollectionUtils.isEmpty(queryStructCmd.getAggregators())) {
            return false;
        }
        // Fix: the previous version also compared the AggOperatorEnum against ""
        // (always false for an enum — dead code) and kept a nonSumFunction counter
        // that could never be zero once the loop completed. Equivalent simplification:
        // reject if any aggregator lacks a known function, otherwise accept.
        for (Aggregator agg : queryStructCmd.getAggregators()) {
            if (agg.getFunc() == null || AggOperatorEnum.UNKNOWN.equals(agg.getFunc())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Regenerates the SQL command for the query and copies it onto the statement's
     * existing {@link ParseSqlReq}, using the dialect/version of the model's database.
     */
    @Override
    public void converter(Catalog catalog, QueryStatement queryStatement) throws Exception {
        QueryStructReq queryStructCmd = queryStatement.getQueryStructReq();
        ParseSqlReq sqlCommend = queryStatement.getParseSqlReq();
        DatabaseResp databaseResp = catalog.getDatabaseByModelId(queryStructCmd.getModelIds().get(0));
        ParseSqlReq parseSqlReq = generateSqlCommend(queryStructCmd,
                EngineTypeEnum.valueOf(databaseResp.getType().toUpperCase()), databaseResp.getVersion());
        sqlCommend.setSql(parseSqlReq.getSql());
        sqlCommend.setTables(parseSqlReq.getTables());
        sqlCommend.setRootPath(parseSqlReq.getRootPath());
        sqlCommend.setVariables(parseSqlReq.getVariables());
        sqlCommend.setSupportWith(parseSqlReq.isSupportWith());
    }

    /**
     * Ratio
     *
     * @return true if any aggregator requests a roll (period-over-period) or
     *         over (year-over-year) ratio
     */
    public boolean isRatioAccept(QueryStructReq queryStructCmd) {
        // simplified from a count()>0 pattern; anyMatch short-circuits
        return queryStructCmd.getAggregators().stream()
                .anyMatch(f -> AggOperatorEnum.RATIO_ROLL.equals(f.getFunc())
                        || AggOperatorEnum.RATIO_OVER.equals(f.getFunc()));
    }

    /**
     * Builds the self-join SQL for a ratio query, choosing the dialect renderer by
     * engine type. H2 always uses a WITH clause; MySQL/Doris/ClickHouse fall back to
     * sub-selects when the engine version does not support WITH.
     */
    public ParseSqlReq generateRatioSqlCommand(QueryStructReq queryStructCmd, EngineTypeEnum engineTypeEnum,
            String version)
            throws Exception {
        check(queryStructCmd);
        ParseSqlReq sqlCommand = new ParseSqlReq();
        sqlCommand.setRootPath(catalog.getModelFullPath(queryStructCmd.getModelIds()));
        String metricTableName = "v_metric_tb_tmp";
        MetricTable metricTable = new MetricTable();
        metricTable.setAlias(metricTableName);
        metricTable.setMetrics(queryStructCmd.getMetrics());
        metricTable.setDimensions(queryStructCmd.getGroups());
        String where = queryStructUtils.generateWhere(queryStructCmd);
        log.info("in generateSqlCommend, complete where:{}", where);
        metricTable.setWhere(where);
        metricTable.setAggOption(AggOption.AGGREGATION);
        sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
        boolean isOver = isOverRatio(queryStructCmd);
        String sql = "";
        switch (engineTypeEnum) {
            case H2:
                sql = new H2EngineSql().sql(queryStructCmd, isOver, true, metricTableName);
                break;
            case MYSQL:
            case DORIS:
            case CLICKHOUSE:
                if (!queryStructUtils.isSupportWith(engineTypeEnum, version)) {
                    sqlCommand.setSupportWith(false);
                }
                // Fix: enum constant was previously accessed through the instance
                // (engineTypeEnum.CLICKHOUSE); reference it statically instead.
                if (!EngineTypeEnum.CLICKHOUSE.equals(engineTypeEnum)) {
                    sql = new MysqlEngineSql().sql(queryStructCmd, isOver, sqlCommand.isSupportWith(),
                            metricTableName);
                } else {
                    sql = new CkEngineSql().sql(queryStructCmd, isOver, sqlCommand.isSupportWith(), metricTableName);
                }
                break;
            default:
        }
        sqlCommand.setSql(sql);
        return sqlCommand;
    }

    /** H2 dialect: DATEADD/TIMESTAMPADD based period shifting. */
    public class H2EngineSql implements EngineSql {

        // outer select list: ratio expressions plus pass-through group columns
        public String getOverSelect(QueryStructReq queryStructCmd, boolean isOver) {
            String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
                            f.getColumn(), f.getColumn(), f.getColumn(), f.getColumn(),
                            f.getFunc().getOperator(), f.getColumn());
                } else {
                    return f.getColumn();
                }
            }).collect(Collectors.joining(","));
            return CollectionUtils.isEmpty(queryStructCmd.getGroups()) ? aggStr
                    : String.join(",", queryStructCmd.getGroups()) + "," + aggStr;
        }

        // shift unit/amount: over-ratio compares to the previous larger period,
        // roll-ratio to the immediately preceding period
        public String getTimeSpan(QueryStructReq queryStructCmd, boolean isOver, boolean isAdd) {
            if (Objects.nonNull(queryStructCmd.getDateInfo())) {
                String addStr = isAdd ? "" : "-";
                if (queryStructCmd.getDateInfo().getPeriod().equalsIgnoreCase(Constants.DAY)) {
                    return "day," + (isOver ? addStr + "7" : addStr + "1");
                }
                if (queryStructCmd.getDateInfo().getPeriod().equalsIgnoreCase(Constants.WEEK)) {
                    return isOver ? "month," + addStr + "1" : "day," + addStr + "7";
                }
                if (queryStructCmd.getDateInfo().getPeriod().equalsIgnoreCase(Constants.MONTH)) {
                    return isOver ? "year," + addStr + "1" : "month," + addStr + "1";
                }
            }
            return "";
        }

        // join predicate: equal non-time groups plus the shifted time comparison
        public String getJoinOn(QueryStructReq queryStructCmd, boolean isOver, String aliasLeft, String aliasRight) {
            String timeDim = getTimeDim(queryStructCmd);
            String timeSpan = getTimeSpan(queryStructCmd, isOver, true);
            String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    if (queryStructCmd.getDateInfo().getPeriod().equals(Constants.MONTH)) {
                        return String.format(
                                "%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
                                aliasRight + timeDim, aliasLeft + timeDim, timeSpan, aliasRight + timeDim);
                    }
                    if (queryStructCmd.getDateInfo().getPeriod().equals(Constants.WEEK) && isOver) {
                        return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
                                getTimeSpan(queryStructCmd, isOver, false), aliasLeft + timeDim,
                                aliasRight + timeDim);
                    }
                    return String.format("%s = TIMESTAMPADD(%s,%s) ",
                            aliasLeft + timeDim, timeSpan, aliasRight + timeDim);
                } else {
                    return f.getColumn();
                }
            }).collect(Collectors.joining(" and "));
            List<String> groups = new ArrayList<>();
            for (String group : queryStructCmd.getGroups()) {
                if (group.equalsIgnoreCase(timeDim)) {
                    continue;
                }
                groups.add(aliasLeft + group + " = " + aliasRight + group);
            }
            return CollectionUtils.isEmpty(groups) ? aggStr
                    : String.join(" and ", groups) + " and " + aggStr + " ";
        }

        @Override
        public String sql(QueryStructReq queryStructCmd, boolean isOver, boolean asWith, String metricSql) {
            String sql = String.format(
                    "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
                    getOverSelect(queryStructCmd, isOver), getAllSelect(queryStructCmd, "t0."),
                    getAllJoinSelect(queryStructCmd, "t1."), metricSql, metricSql,
                    getJoinOn(queryStructCmd, isOver, "t0.", "t1."),
                    getOrderBy(queryStructCmd), getLimit(queryStructCmd));
            return sql;
        }
    }

    /** ClickHouse dialect: toDate/toMonday based shifting; reuses MySQL select logic. */
    public class CkEngineSql extends MysqlEngineSql {

        @Override
        public String getJoinOn(QueryStructReq queryStructCmd, boolean isOver, String aliasLeft, String aliasRight) {
            String timeDim = getTimeDim(queryStructCmd);
            String timeSpan = "INTERVAL " + getTimeSpan(queryStructCmd, isOver, true);
            String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    if (queryStructCmd.getDateInfo().getPeriod().equals(Constants.MONTH)) {
                        return String.format("toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
                                aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
                    }
                    if (queryStructCmd.getDateInfo().getPeriod().equals(Constants.WEEK) && isOver) {
                        return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s",
                                aliasLeft + timeDim, getTimeSpan(queryStructCmd, isOver, false),
                                aliasRight + timeDim);
                    }
                    return String.format("%s = date_add(%s,%s) ",
                            aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
                } else {
                    return f.getColumn();
                }
            }).collect(Collectors.joining(" and "));
            List<String> groups = new ArrayList<>();
            for (String group : queryStructCmd.getGroups()) {
                if (group.equalsIgnoreCase(timeDim)) {
                    continue;
                }
                groups.add(aliasLeft + group + " = " + aliasRight + group);
            }
            return CollectionUtils.isEmpty(groups) ? aggStr
                    : String.join(" and ", groups) + " and " + aggStr + " ";
        }

        @Override
        public String sql(QueryStructReq queryStructCmd, boolean isOver, boolean asWith, String metricSql) {
            if (!asWith) {
                return String.format(
                        "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
                        getOverSelect(queryStructCmd, isOver), getAllSelect(queryStructCmd, "t0."),
                        getAllJoinSelect(queryStructCmd, "t1."), metricSql, metricSql,
                        getJoinOn(queryStructCmd, isOver, "t0.", "t1."),
                        getOrderBy(queryStructCmd), getLimit(queryStructCmd));
            }
            // with-clause variant: both join sides read from CTEs t0/t1
            return String.format(
                    ",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s "
                            + "from t0 left join t1 on %s ) metric_tb_src %s %s ",
                    metricSql, metricSql, getOverSelect(queryStructCmd, isOver), getAllSelect(queryStructCmd, "t0."),
                    getAllJoinSelect(queryStructCmd, "t1."),
                    getJoinOn(queryStructCmd, isOver, "t0.", "t1."),
                    getOrderBy(queryStructCmd), getLimit(queryStructCmd));
        }
    }

    /** MySQL/Doris dialect: date_add based shifting. */
    public class MysqlEngineSql implements EngineSql {

        public String getTimeSpan(QueryStructReq queryStructCmd, boolean isOver, boolean isAdd) {
            if (Objects.nonNull(queryStructCmd.getDateInfo())) {
                String addStr = isAdd ? "" : "-";
                if (queryStructCmd.getDateInfo().getPeriod().equalsIgnoreCase(Constants.DAY)) {
                    return isOver ? addStr + "7 day" : addStr + "1 day";
                }
                if (queryStructCmd.getDateInfo().getPeriod().equalsIgnoreCase(Constants.WEEK)) {
                    return isOver ? addStr + "1 month" : addStr + "7 day";
                }
                if (queryStructCmd.getDateInfo().getPeriod().equalsIgnoreCase(Constants.MONTH)) {
                    return isOver ? addStr + "1 year" : addStr + "1 month";
                }
            }
            return "";
        }

        public String getOverSelect(QueryStructReq queryStructCmd, boolean isOver) {
            String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    // guard against division by zero on the shifted value
                    return String.format(
                            "if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
                            f.getColumn(), f.getColumn(), f.getColumn(), f.getColumn(),
                            f.getColumn(), f.getFunc().getOperator(), f.getColumn());
                } else {
                    return f.getColumn();
                }
            }).collect(Collectors.joining(","));
            return CollectionUtils.isEmpty(queryStructCmd.getGroups()) ? aggStr
                    : String.join(",", queryStructCmd.getGroups()) + "," + aggStr;
        }

        public String getJoinOn(QueryStructReq queryStructCmd, boolean isOver, String aliasLeft, String aliasRight) {
            String timeDim = getTimeDim(queryStructCmd);
            String timeSpan = "INTERVAL " + getTimeSpan(queryStructCmd, isOver, true);
            String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    if (queryStructCmd.getDateInfo().getPeriod().equals(Constants.MONTH)) {
                        return String.format("%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
                                aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
                    }
                    if (queryStructCmd.getDateInfo().getPeriod().equals(Constants.WEEK) && isOver) {
                        return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s",
                                aliasLeft + timeDim, getTimeSpan(queryStructCmd, isOver, false),
                                aliasRight + timeDim);
                    }
                    return String.format("%s = date_add(%s,%s) ",
                            aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
                } else {
                    return f.getColumn();
                }
            }).collect(Collectors.joining(" and "));
            List<String> groups = new ArrayList<>();
            for (String group : queryStructCmd.getGroups()) {
                if (group.equalsIgnoreCase(timeDim)) {
                    continue;
                }
                groups.add(aliasLeft + group + " = " + aliasRight + group);
            }
            return CollectionUtils.isEmpty(groups) ? aggStr
                    : String.join(" and ", groups) + " and " + aggStr + " ";
        }

        @Override
        public String sql(QueryStructReq queryStructCmd, boolean isOver, boolean asWith, String metricSql) {
            String sql = String.format(
                    "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
                    getOverSelect(queryStructCmd, isOver), getAllSelect(queryStructCmd, "t0."),
                    getAllJoinSelect(queryStructCmd, "t1."), metricSql, metricSql,
                    getJoinOn(queryStructCmd, isOver, "t0.", "t1."),
                    getOrderBy(queryStructCmd), getLimit(queryStructCmd));
            return sql;
        }
    }

    // select list of the shifted (right) join side: every column aliased with _roll
    private String getAllJoinSelect(QueryStructReq queryStructCmd, String alias) {
        String aggStr = queryStructCmd.getAggregators().stream()
                .map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "")
                        + "_roll")
                .collect(Collectors.joining(","));
        List<String> groups = new ArrayList<>();
        for (String group : queryStructCmd.getGroups()) {
            groups.add(alias + group + " as " + group + "_roll");
        }
        return CollectionUtils.isEmpty(groups) ? aggStr
                : String.join(",", groups) + "," + aggStr;
    }

    // NOTE(review): appears unused within this class; kept for compatibility.
    private String getGroupDimWithOutTime(QueryStructReq queryStructCmd) {
        String timeDim = getTimeDim(queryStructCmd);
        return queryStructCmd.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
                .collect(Collectors.joining(","));
    }

    // resolves the system date column for the query's date mode
    private static String getTimeDim(QueryStructReq queryStructCmd) {
        DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class);
        return dateModeUtils.getSysDateCol(queryStructCmd.getDateInfo());
    }

    private static String getLimit(QueryStructReq queryStructCmd) {
        if (queryStructCmd != null && queryStructCmd.getLimit() > 0) {
            // String.valueOf was redundant here; concatenation converts the number
            return " limit " + queryStructCmd.getLimit();
        }
        return "";
    }

    // select list of the current (left) join side
    private String getAllSelect(QueryStructReq queryStructCmd, String alias) {
        String aggStr = queryStructCmd.getAggregators().stream().map(f -> getSelectField(f, alias))
                .collect(Collectors.joining(","));
        return CollectionUtils.isEmpty(queryStructCmd.getGroups()) ? aggStr
                : alias + String.join("," + alias, queryStructCmd.getGroups()) + "," + aggStr;
    }

    // ratio columns are referenced bare; other aggregators render via SqlGenerateUtils
    private String getSelectField(final Aggregator agg, String alias) {
        if (agg.getFunc().equals(AggOperatorEnum.RATIO_OVER) || agg.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
            return alias + agg.getColumn();
        }
        return sqlGenerateUtils.getSelectField(agg);
    }

    // NOTE(review): appears unused within this class; kept for compatibility.
    private String getGroupBy(QueryStructReq queryStructCmd) {
        if (CollectionUtils.isEmpty(queryStructCmd.getGroups())) {
            return "";
        }
        return "group by " + String.join(",", queryStructCmd.getGroups());
    }

    private static String getOrderBy(QueryStructReq queryStructCmd) {
        return "order by " + getTimeDim(queryStructCmd) + " desc";
    }

    /** true when the query uses year-over-year (RATIO_OVER) rather than roll ratio. */
    private boolean isOverRatio(QueryStructReq queryStructCmd) {
        // simplified from a count()>0 pattern; anyMatch short-circuits
        return queryStructCmd.getAggregators().stream()
                .anyMatch(f -> AggOperatorEnum.RATIO_OVER.equals(f.getFunc()));
    }

    /**
     * Validates a ratio query: over- and roll-ratios cannot be mixed, and a time
     * dimension must be resolvable to anchor the period shift.
     *
     * @throws Exception when the query mixes ratio kinds or lacks a time filter
     */
    private void check(QueryStructReq queryStructCmd) throws Exception {
        Long ratioOverNum = queryStructCmd.getAggregators().stream()
                .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
        Long ratioRollNum = queryStructCmd.getAggregators().stream()
                .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count();
        if (ratioOverNum > 0 && ratioRollNum > 0) {
            throw new Exception("not support over ratio and roll ratio together ");
        }
        if (getTimeDim(queryStructCmd).isEmpty()) {
            throw new Exception("miss time filter");
        }
    }
}

View File

@@ -0,0 +1,59 @@
package com.tencent.supersonic.headless.core.parser.convert;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.core.parser.HeadlessConverter;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Injects default dimension values into a struct query: for every dimension that is
 * configured with default values and not already filtered by the request, an IN
 * filter over those defaults is appended to the query's dimension filters.
 */
@Slf4j
@Component("DefaultDimValueConverter")
public class DefaultDimValueConverter implements HeadlessConverter {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        // only plain struct queries; S2SQL statements are handled elsewhere
        boolean structQuery = !Objects.isNull(queryStatement.getQueryStructReq());
        return structQuery && !queryStatement.getIsS2SQL();
    }

    @Override
    public void converter(Catalog catalog, QueryStatement queryStatement) {
        QueryStructReq structReq = queryStatement.getQueryStructReq();
        MetaFilter modelFilter = new MetaFilter(structReq.getModelIds());
        // dimensions that declare default values
        List<DimensionResp> dimensionsWithDefaults = catalog.getDimensions(modelFilter).stream()
                .filter(dim -> !CollectionUtils.isEmpty(dim.getDefaultValues()))
                .collect(Collectors.toList());
        if (CollectionUtils.isEmpty(dimensionsWithDefaults)) {
            return;
        }
        log.info("dimension with default values:{}, queryStruct:{}", dimensionsWithDefaults, structReq);
        // dimensions the caller already filters on keep their explicit filter
        List<String> filteredBizNames = structReq.getDimensionFilters().stream()
                .map(Filter::getBizName).collect(Collectors.toList());
        for (DimensionResp dim : dimensionsWithDefaults) {
            if (filteredBizNames.contains(dim.getBizName())) {
                continue;
            }
            Filter defaultFilter = new Filter();
            defaultFilter.setBizName(dim.getBizName());
            defaultFilter.setValue(dim.getDefaultValues());
            defaultFilter.setOperator(FilterOperatorEnum.IN);
            defaultFilter.setName(dim.getName());
            structReq.getDimensionFilters().add(defaultFilter);
        }
    }
}

View File

@@ -0,0 +1,86 @@
package com.tencent.supersonic.headless.core.parser.convert;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.exception.InvalidArgumentException;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.core.parser.HeadlessConverter;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Validates that every queried metric which declares "necessary" dimensions is
 * accompanied by a filter on at least one of them; otherwise the query is rejected
 * with an {@link InvalidArgumentException}.
 */
@Component("MetricCheckConverter")
@Slf4j
public class MetricCheckConverter implements HeadlessConverter {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        // only aggregated struct queries are subject to this check
        if (Objects.isNull(queryStatement.getQueryStructReq()) || queryStatement.getIsS2SQL()) {
            return false;
        }
        QueryStructReq structReq = queryStatement.getQueryStructReq();
        if (structReq.getQueryType().isNativeAggQuery()) {
            return false;
        }
        return !CollectionUtils.isEmpty(structReq.getAggregators());
    }

    @Override
    public void converter(Catalog catalog, QueryStatement queryStatement) {
        QueryStructReq structReq = queryStatement.getQueryStructReq();
        MetaFilter modelFilter = new MetaFilter(structReq.getModelIds());
        List<MetricResp> allMetrics = catalog.getMetrics(modelFilter);
        List<DimensionResp> allDimensions = catalog.getDimensions(modelFilter);
        Map<Long, DimensionResp> dimensionById = allDimensions.stream()
                .collect(Collectors.toMap(DimensionResp::getId, d -> d));
        List<String> queriedMetricNames = structReq.getMetrics();
        List<String> filteredDimNames = structReq.getDimensionFilters().stream()
                .map(Filter::getBizName).collect(Collectors.toList());
        // metrics actually requested by the query
        List<MetricResp> queriedMetrics = allMetrics.stream()
                .filter(metric -> queriedMetricNames.contains(metric.getBizName()))
                .collect(Collectors.toList());
        // ids of dimensions that appear in the query's filters
        List<Long> filteredDimIds = allDimensions.stream()
                .filter(dim -> filteredDimNames.contains(dim.getBizName()))
                .map(DimensionResp::getId)
                .collect(Collectors.toList());
        for (MetricResp metric : queriedMetrics) {
            Set<Long> necessaryDimensionIds = metric.getNecessaryDimensionIds();
            if (CollectionUtils.isEmpty(necessaryDimensionIds)) {
                continue;
            }
            // first resolvable necessary dimension, used only for the error message
            DimensionResp necessaryDim = necessaryDimensionIds.stream()
                    .map(dimensionById::get)
                    .filter(Objects::nonNull)
                    .findFirst()
                    .orElse(null);
            if (necessaryDim == null) {
                continue;
            }
            String message = String.format("该指标必须配合维度[%s]来进行过滤查询", necessaryDim.getName());
            if (CollectionUtils.isEmpty(filteredDimIds)) {
                throw new InvalidArgumentException(message);
            }
            boolean satisfied = filteredDimIds.stream().anyMatch(necessaryDimensionIds::contains);
            if (!satisfied) {
                throw new InvalidArgumentException(message);
            }
        }
    }
}

View File

@@ -0,0 +1,80 @@
package com.tencent.supersonic.headless.core.parser.convert;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.common.core.pojo.Param;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.core.parser.HeadlessConverter;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.utils.QueryStructUtils;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Fallback converter that turns a struct query into a {@link MetricReq}
 * whenever the aggregation converter declines to handle the statement.
 */
@Component("ParserDefaultConverter")
@Slf4j
public class ParserDefaultConverter implements HeadlessConverter {

    private final CalculateAggConverter calculateCoverterAgg;
    private final QueryStructUtils queryStructUtils;

    public ParserDefaultConverter(
            CalculateAggConverter calculateCoverterAgg,
            QueryStructUtils queryStructUtils) {
        this.calculateCoverterAgg = calculateCoverterAgg;
        this.queryStructUtils = queryStructUtils;
    }

    @Override
    public boolean accept(QueryStatement queryStatement) {
        // Handles plain struct queries (not S2SQL) that the agg converter rejects.
        if (Objects.isNull(queryStatement.getQueryStructReq()) || queryStatement.getIsS2SQL()) {
            return false;
        }
        return !calculateCoverterAgg.accept(queryStatement);
    }

    @Override
    public void converter(Catalog catalog, QueryStatement queryStatement) throws Exception {
        QueryStructReq structReq = queryStatement.getQueryStructReq();
        MetricReq target = queryStatement.getMetricReq();
        MetricReq generated = generateSqlCommand(catalog, structReq);
        queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(structReq));
        // Copy the generated command onto the statement's existing MetricReq instance.
        BeanUtils.copyProperties(generated, target);
    }

    /**
     * Builds the metric command (metrics, dimensions, where, order, variables,
     * limit, root path) from the struct query.
     */
    public MetricReq generateSqlCommand(Catalog catalog, QueryStructReq queryStructCmd) {
        MetricReq command = new MetricReq();
        command.setMetrics(queryStructCmd.getMetrics());
        command.setDimensions(queryStructCmd.getGroups());
        String where = queryStructUtils.generateWhere(queryStructCmd);
        log.info("in generateSqlCommend, complete where:{}", where);
        command.setWhere(where);
        command.setOrder(queryStructCmd.getOrders().stream()
                .map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
                .collect(Collectors.toList()));
        // First value wins when parameter names collide.
        command.setVariables(queryStructCmd.getParams().stream()
                .collect(Collectors.toMap(Param::getName, Param::getValue, (k1, k2) -> k1)));
        command.setLimit(queryStructCmd.getLimit());
        command.setRootPath(catalog.getModelFullPath(queryStructCmd.getModelIds()));
        // todo tmp delete
        // support detail query
        if (queryStructCmd.getQueryType().isNativeAggQuery() && CollectionUtils.isEmpty(command.getMetrics())) {
            for (Long modelId : queryStructCmd.getModelIds()) {
                command.getMetrics().add(queryStructUtils.generateInternalMetricName(
                        modelId, queryStructCmd.getGroups()));
            }
        }
        return command;
    }
}

View File

@@ -0,0 +1,235 @@
package com.tencent.supersonic.headless.core.parser.convert;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserReplaceHelper;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserSelectFunctionHelper;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserSelectHelper;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.common.core.pojo.MetricTable;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.pojo.SchemaItem;
import com.tencent.supersonic.headless.common.server.request.SqlExecuteReq;
import com.tencent.supersonic.headless.common.server.response.DatabaseResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.service.HeadlessQueryEngine;
import com.tencent.supersonic.headless.core.utils.QueryStructUtils;
import com.tencent.supersonic.headless.server.engineadapter.EngineAdaptor;
import com.tencent.supersonic.headless.server.engineadapter.EngineAdaptorFactory;
import com.tencent.supersonic.headless.server.pojo.EngineTypeEnum;
import com.tencent.supersonic.headless.server.service.Catalog;
import com.tencent.supersonic.headless.server.service.ModelService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Converts an S2SQL request into an executable {@link QueryStatement}:
 * rewrites display names to bizNames, corrects function and table names for
 * the target engine, builds the metric table, then delegates SQL planning.
 */
@Component
@Slf4j
public class QueryReqConverter {

    @Autowired
    private ModelService domainService;

    @Autowired
    private HeadlessQueryEngine parserService;

    @Autowired
    private QueryStructUtils queryStructUtils;

    @Autowired
    private Catalog catalog;

    /**
     * Full conversion pipeline for one S2SQL request.
     *
     * @param databaseReq      the incoming S2SQL request (its SQL is rewritten in place)
     * @param modelSchemaResps schemas of the models referenced by the request
     * @return a planned QueryStatement, or an empty one when schema/table resolution fails
     */
    public QueryStatement convert(QueryS2SQLReq databaseReq, List<ModelSchemaResp> modelSchemaResps) throws Exception {
        if (CollectionUtils.isEmpty(modelSchemaResps)) {
            return new QueryStatement();
        }
        //1.convert name to bizName
        convertNameToBizName(databaseReq, modelSchemaResps);
        //2.functionName corrector
        functionNameCorrector(databaseReq);
        //3.correct tableName
        correctTableName(databaseReq);
        String tableName = SqlParserSelectHelper.getTableName(databaseReq.getSql());
        if (StringUtils.isEmpty(tableName)) {
            return new QueryStatement();
        }
        //4.build MetricTables
        List<String> allFields = SqlParserSelectHelper.getAllFields(databaseReq.getSql());
        List<String> metrics = getMetrics(modelSchemaResps, allFields);
        QueryStructReq queryStructCmd = new QueryStructReq();
        MetricTable metricTable = new MetricTable();
        metricTable.setMetrics(metrics);
        Set<String> dimensions = getDimensions(modelSchemaResps, allFields);
        metricTable.setDimensions(new ArrayList<>(dimensions));
        metricTable.setAlias(tableName.toLowerCase());
        // if metric empty , fill model default
        if (CollectionUtils.isEmpty(metricTable.getMetrics())) {
            metricTable.setMetrics(new ArrayList<>());
            for (Long modelId : databaseReq.getModelIds()) {
                metricTable.getMetrics().add(queryStructUtils.generateInternalMetricName(modelId,
                        metricTable.getDimensions()));
            }
        } else {
            queryStructCmd.setAggregators(
                    metricTable.getMetrics().stream().map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN)).collect(
                            Collectors.toList()));
        }
        AggOption aggOption = getAggOption(databaseReq);
        metricTable.setAggOption(aggOption);
        List<MetricTable> tables = new ArrayList<>();
        tables.add(metricTable);
        //4.build ParseSqlReq
        ParseSqlReq result = new ParseSqlReq();
        BeanUtils.copyProperties(databaseReq, result);
        result.setRootPath(catalog.getModelFullPath(databaseReq.getModelIds()));
        result.setTables(tables);
        DatabaseResp database = catalog.getDatabaseByModelId(databaseReq.getModelIds().get(0));
        // Engines without WITH support must fall back to non-CTE generation.
        if (!queryStructUtils.isSupportWith(EngineTypeEnum.valueOf(database.getType().toUpperCase()),
                database.getVersion())) {
            result.setSupportWith(false);
            result.setWithAlias(false);
        }
        //5.physicalSql by ParseSqlReq
        queryStructCmd.setDateInfo(queryStructUtils.getDateConfBySql(databaseReq.getSql()));
        queryStructCmd.setModelIds(databaseReq.getModelIds().stream().collect(Collectors.toSet()));
        queryStructCmd.setQueryType(getQueryType(aggOption));
        log.info("QueryReqConverter queryStructCmd[{}]", queryStructCmd);
        QueryStatement queryStatement = new QueryStatement();
        queryStatement.setQueryStructReq(queryStructCmd);
        queryStatement.setParseSqlReq(result);
        queryStatement.setIsS2SQL(true);
        queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructCmd));
        queryStatement = parserService.plan(queryStatement);
        queryStatement.setSql(String.format(SqlExecuteReq.LIMIT_WRAPPER, queryStatement.getSql()));
        return queryStatement;
    }

    private AggOption getAggOption(QueryS2SQLReq databaseReq) {
        // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE"
        // if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE"
        String sql = databaseReq.getSql();
        if (!SqlParserSelectHelper.hasGroupBy(sql)
                || SqlParserSelectFunctionHelper.hasFunction(sql, "count")
                || SqlParserSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
            return AggOption.NATIVE;
        }
        return AggOption.DEFAULT;
    }

    /** Rewrites display names (and aliases) in the SQL to bizNames, in place. */
    private void convertNameToBizName(QueryS2SQLReq databaseReq, List<ModelSchemaResp> modelSchemaResps) {
        Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(modelSchemaResps);
        String sql = databaseReq.getSql();
        log.info("convert name to bizName before:{}", sql);
        String replaceFields = SqlParserReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
        log.info("convert name to bizName after:{}", replaceFields);
        databaseReq.setSql(replaceFields);
    }

    /** Case-insensitively resolves the queried fields that are dimensions (incl. internal columns). */
    private Set<String> getDimensions(List<ModelSchemaResp> modelSchemaResps, List<String> allFields) {
        Map<String, String> dimensionLowerToNameMap = modelSchemaResps.stream()
                .flatMap(modelSchemaResp -> modelSchemaResp.getDimensions().stream())
                .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(), SchemaItem::getBizName,
                        (k1, k2) -> k1));
        Map<String, String> internalLowerToNameMap = QueryStructUtils.internalCols.stream()
                .collect(Collectors.toMap(String::toLowerCase, a -> a));
        dimensionLowerToNameMap.putAll(internalLowerToNameMap);
        return allFields.stream()
                .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase())).collect(Collectors.toSet());
    }

    /** Case-insensitively resolves the queried fields that are metrics. */
    private List<String> getMetrics(List<ModelSchemaResp> modelSchemaResps, List<String> allFields) {
        // Merge function keeps the first entry so duplicate bizNames across models
        // cannot make toMap throw (consistent with getDimensions above).
        Map<String, String> metricLowerToNameMap = modelSchemaResps.stream()
                .flatMap(modelSchemaResp -> modelSchemaResp.getMetrics().stream())
                .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(), SchemaItem::getBizName,
                        (k1, k2) -> k1));
        return allFields.stream().filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> metricLowerToNameMap.get(entry.toLowerCase())).collect(Collectors.toList());
    }

    /** Rewrites function names in the SQL to the dialect of the backing engine, in place. */
    private void functionNameCorrector(QueryS2SQLReq databaseReq) {
        DatabaseResp database = catalog.getDatabaseByModelId(databaseReq.getModelIds().get(0));
        if (Objects.isNull(database) || Objects.isNull(database.getType())) {
            return;
        }
        String type = database.getType();
        EngineAdaptor engineAdaptor = EngineAdaptorFactory.getEngineAdaptor(type.toLowerCase());
        log.info("type:{},engineAdaptor:{}", type, engineAdaptor);
        if (Objects.nonNull(engineAdaptor)) {
            String functionNameCorrector = engineAdaptor.functionNameCorrector(databaseReq.getSql());
            // Fixed: the second argument previously had no placeholder and was silently dropped.
            log.info("sql:{}, after corrector sql:{}", databaseReq.getSql(), functionNameCorrector);
            databaseReq.setSql(functionNameCorrector);
        }
    }

    protected Map<String, String> getFieldNameToBizNameMap(List<ModelSchemaResp> modelSchemaResps) {
        // support fieldName and field alias to bizName
        Map<String, String> dimensionResults = modelSchemaResps.stream().flatMap(modelSchemaResp
                        -> modelSchemaResp.getDimensions().stream())
                .flatMap(entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        Map<String, String> metricResults = modelSchemaResps.stream().flatMap(modelSchemaResp
                        -> modelSchemaResp.getMetrics().stream())
                .flatMap(entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap());
        dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap());
        dimensionResults.putAll(metricResults);
        return dimensionResults;
    }

    /** Expands one schema item into (name, bizName) plus one pair per alias. */
    private Stream<Pair<String, String>> getPairStream(String aliasStr, String name, String bizName) {
        Set<Pair<String, String>> elements = new HashSet<>();
        elements.add(Pair.of(name, bizName));
        if (StringUtils.isNotBlank(aliasStr)) {
            List<String> aliasList = SchemaItem.getAliasList(aliasStr);
            for (String alias : aliasList) {
                elements.add(Pair.of(alias, bizName));
            }
        }
        return elements.stream();
    }

    /** Replaces every table reference with the canonical per-model table name, in place. */
    public void correctTableName(QueryS2SQLReq databaseReq) {
        String sql = databaseReq.getSql();
        for (Long modelId : databaseReq.getModelIds()) {
            sql = SqlParserReplaceHelper.replaceTable(sql, Constants.TABLE_PREFIX + modelId);
        }
        databaseReq.setSql(sql);
    }

    private QueryType getQueryType(AggOption aggOption) {
        boolean isAgg = AggOption.isAgg(aggOption);
        QueryType queryType = QueryType.TAG;
        if (isAgg) {
            queryType = QueryType.METRIC;
        }
        return queryType;
    }
}

View File

@@ -0,0 +1,75 @@
package com.tencent.supersonic.headless.core.parser.convert;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.enums.ModelSourceType;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.core.parser.HeadlessConverter;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.utils.QueryStructUtils;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Objects;
/**
* correct the Query parameters when the model source type is zipper
*/
/**
 * Corrects query parameters when every model behind the query is a zipper
 * (slowly-changing) table: the requested time filter is rewritten to the
 * zipper table's start_/end_ range columns.
 */
@Component("ZipperModelConverter")
@Slf4j
public class ZipperModelConverter implements HeadlessConverter {

    private final QueryStructUtils queryStructUtils;
    private final Catalog catalog;

    public ZipperModelConverter(QueryStructUtils queryStructUtils,
            Catalog catalog) {
        this.queryStructUtils = queryStructUtils;
        this.catalog = catalog;
    }

    @Override
    public boolean accept(QueryStatement queryStatement) {
        if (Objects.isNull(queryStatement.getQueryStructReq())) {
            return false;
        }
        QueryStructReq queryStructCmd = queryStatement.getQueryStructReq();
        List<ModelResp> modelRespList = catalog.getModelList(queryStructCmd.getModelIds());
        if (!CollectionUtils.isEmpty(modelRespList)) {
            // accept only when ALL data sources are zipper tables
            long zipperCnt = modelRespList.stream().filter(m -> ModelSourceType.isZipper(m.getSourceType()))
                    .count();
            return modelRespList.size() == zipperCnt;
        }
        return false;
    }

    @Override
    public void converter(Catalog catalog, QueryStatement queryStatement) throws Exception {
        QueryStructReq queryStructCmd = queryStatement.getQueryStructReq();
        ParseSqlReq sqlCommend = queryStatement.getParseSqlReq();
        MetricReq metricCommand = queryStatement.getMetricReq();
        doSingleZipperSource(queryStructCmd, sqlCommend, metricCommand);
    }

    /**
     * Rewrites the request's time field to the start_/end_ zipper columns,
     * either inside the S2SQL statement or on the metric command's where clause.
     */
    protected void doSingleZipperSource(QueryStructReq queryStructCmd, ParseSqlReq sqlCommend,
            MetricReq metricCommand) {
        // Guard against a missing ParseSqlReq/SQL: the struct-query path only
        // populates MetricReq, and dereferencing getSql() here used to NPE.
        if (Objects.nonNull(sqlCommend) && Objects.nonNull(sqlCommend.getSql())
                && !sqlCommend.getSql().isEmpty()) {
            String sqlNew = queryStructUtils.generateZipperWhere(queryStructCmd, sqlCommend);
            log.info("doSingleZipperSource before[{}] after[{}]", sqlCommend.getSql(), sqlNew);
            sqlCommend.setSql(sqlNew);
        } else {
            String where = queryStructUtils.generateZipperWhere(queryStructCmd);
            if (!where.isEmpty() && Objects.nonNull(metricCommand)) {
                log.info("doSingleZipperSource before[{}] after[{}]", metricCommand.getWhere(), where);
                metricCommand.setWhere(where);
            }
        }
    }
}

View File

@@ -0,0 +1,15 @@
package com.tencent.supersonic.headless.core.persistence.mapper;
import com.tencent.supersonic.headless.common.server.pojo.QueryStat;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import java.util.List;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis mapper for query-statistics persistence and retrieval.
 */
@Mapper
public interface StatMapper {
    // Persists one query statistics record; returns true on success.
    Boolean createRecord(QueryStat queryStatInfo);
    // Fetches the raw statistics rows matching the given filter.
    List<QueryStat> getStatInfo(ItemUseReq itemUseCommend);
}

View File

@@ -0,0 +1,15 @@
package com.tencent.supersonic.headless.core.persistence.pojo;
import lombok.Builder;
import lombok.Data;
import java.util.List;
// Tabular payload for a file download: header rows plus data rows, both as
// lists of string cells. Accessors and builder are generated by Lombok.
// NOTE(review): fields are package-private; consider making them private
// (Lombok generates getters anyway) — confirm no same-package direct access.
@Data
@Builder
public class DataDownload {
    // Header rows (each inner list is one row of header cell values).
    List<List<String>> headers;
    // Data rows, column-aligned with the headers above.
    List<List<String>> data;
}

View File

@@ -0,0 +1,32 @@
package com.tencent.supersonic.headless.core.persistence.pojo;
/**
 * Generic envelope for parser-service responses: a status code, a
 * human-readable message, and an optional typed payload.
 *
 * @param <T> type of the payload carried in {@code data}
 */
public class ParserSvrResponse<T> {

    /** Status code returned by the parser service. */
    private String code;

    /** Message accompanying the status code. */
    private String msg;

    /** Response payload; may be {@code null}. */
    private T data;

    public String getCode() {
        return this.code;
    }

    public String getMsg() {
        return this.msg;
    }

    public T getData() {
        return this.data;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public void setData(T data) {
        this.data = data;
    }
}

View File

@@ -0,0 +1,38 @@
package com.tencent.supersonic.headless.core.persistence.pojo;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import lombok.Data;
import org.apache.commons.lang3.tuple.ImmutablePair;
import java.util.List;
import org.apache.commons.lang3.tuple.Triple;
// Mutable carrier for one query as it moves through the planning pipeline:
// holds the incoming requests, the generated SQL and planning metadata.
// Accessors are generated by Lombok's @Data from the field declarations.
@Data
public class QueryStatement {
    // Models referenced by this query.
    private List<Long> modelIds;
    // Final physical SQL produced by planning; empty string until planned.
    private String sql = "";
    private String sourceId = "";
    // Error message; empty string means no error so far.
    private String errMsg = "";
    // Cached success flag, recomputed by isOk().
    private Boolean ok;
    private QueryStructReq queryStructReq;
    private MetricReq metricReq;
    private ParseSqlReq parseSqlReq;
    private Integer status = 0;
    // True when the statement originated from an S2SQL request.
    private Boolean isS2SQL = false;
    private List<ImmutablePair<String, String>> timeRanges;
    private Boolean enableOptimize = true;
    // (min, max, ?) time bounds — third component's meaning not visible here; see QueryStructUtils.
    private Triple<String, String, String> minMaxTime;
    // Recomputes and caches `ok`: success iff no error message and SQL was produced.
    // Note the side effect: calling isOk() overwrites any previously set `ok`.
    public boolean isOk() {
        this.ok = "".equals(errMsg) && !"".equals(sql);
        return ok;
    }
    // Records an error message and returns this statement for fluent chaining.
    public QueryStatement error(String msg) {
        this.setErrMsg(msg);
        return this;
    }
}

View File

@@ -0,0 +1,15 @@
package com.tencent.supersonic.headless.core.persistence.repository;
import com.tencent.supersonic.headless.common.server.pojo.QueryStat;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import com.tencent.supersonic.headless.common.core.response.ItemUseResp;
import java.util.List;
/**
 * Persistence abstraction for query statistics: raw record storage plus
 * per-item (metric/dimension) usage aggregation.
 */
public interface StatRepository {
    // Persists one query statistics record; returns true on success.
    Boolean createRecord(QueryStat queryStatInfo);
    // Aggregated usage counts per metric/dimension matching the filter.
    List<ItemUseResp> getStatInfo(ItemUseReq itemUseCommend);
    // Raw statistics rows matching the filter, bypassing any caching layer.
    List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseCommend);
}

View File

@@ -0,0 +1,99 @@
package com.tencent.supersonic.headless.core.persistence.repository;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import com.tencent.supersonic.headless.common.core.response.ItemUseResp;
import com.tencent.supersonic.headless.common.server.pojo.QueryStat;
import com.tencent.supersonic.headless.core.persistence.mapper.StatMapper;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.logging.log4j.util.Strings;
import org.springframework.stereotype.Repository;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.AT_SYMBOL;
/**
 * MyBatis-backed {@link StatRepository}: stores query statistics rows and
 * aggregates per-item (metric/dimension) usage counts from them.
 */
@Slf4j
@Repository
public class StatRepositoryImpl implements StatRepository {

    private final StatMapper statMapper;

    // Reused for deserializing the JSON-encoded dimension/metric name lists.
    private final ObjectMapper mapper = new ObjectMapper();

    public StatRepositoryImpl(StatMapper statMapper) {
        this.statMapper = statMapper;
    }

    @Override
    public Boolean createRecord(QueryStat queryStatInfo) {
        return statMapper.createRecord(queryStatInfo);
    }

    /**
     * Aggregates usage counts of dimensions and metrics from the raw stat rows
     * and returns them sorted by usage count, descending.
     */
    @Override
    @SneakyThrows
    public List<ItemUseResp> getStatInfo(ItemUseReq itemUseReq) {
        List<ItemUseResp> result = new ArrayList<>();
        List<QueryStat> statInfos = statMapper.getStatInfo(itemUseReq);
        Map<String, Long> map = new ConcurrentHashMap<>();
        statInfos.forEach(stat -> {
            updateStatMapInfo(map, stat.getDimensions(), TypeEnums.DIMENSION.getName(), stat.getModelId());
            updateStatMapInfo(map, stat.getMetrics(), TypeEnums.METRIC.getName(), stat.getModelId());
        });
        map.forEach((k, v) -> {
            // Key layout: modelId@@type@@bizName (built in updateStatMapInfo);
            // split once instead of three times.
            String[] parts = k.split(AT_SYMBOL + AT_SYMBOL);
            Long classId = Long.parseLong(parts[0]);
            String type = parts[1];
            String nameEn = parts[2];
            result.add(new ItemUseResp(classId, type, nameEn, v));
        });
        return result.stream().sorted(Comparator.comparing(ItemUseResp::getUseCnt).reversed())
                .collect(Collectors.toList());
    }

    @Override
    public List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseCommend) {
        return statMapper.getStatInfo(itemUseCommend);
    }

    /**
     * Parses the JSON array of item bizNames and increments the usage counter
     * for each one under the key {@code modelId@@type@@bizName}.
     */
    private void updateStatMapInfo(Map<String, Long> map, String items, String type, Long modelId) {
        if (!Strings.isNotEmpty(items)) {
            return;
        }
        try {
            List<String> itemList = mapper.readValue(items, new TypeReference<List<String>>() {
            });
            itemList.forEach(item -> {
                String key = modelId + AT_SYMBOL + AT_SYMBOL + type + AT_SYMBOL + AT_SYMBOL + item;
                // merge replaces the containsKey/put dance and is atomic on ConcurrentHashMap.
                map.merge(key, 1L, Long::sum);
            });
        } catch (Exception e) {
            // A single malformed JSON list is not fatal; keep aggregating other rows.
            // Fixed: previously logged as log.warn("e:{}", e), which misused a
            // placeholder for the Throwable instead of passing it as the cause.
            log.warn("failed to parse item list: {}", items, e);
        }
    }
}

View File

@@ -0,0 +1,161 @@
package com.tencent.supersonic.headless.core.rest;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authentication.utils.UserHolder;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.common.core.request.BatchDownloadReq;
import com.tencent.supersonic.headless.common.core.request.DownloadStructReq;
import com.tencent.supersonic.headless.common.core.request.ExplainSqlReq;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryItemReq;
import com.tencent.supersonic.headless.common.core.request.QueryDimValueReq;
import com.tencent.supersonic.headless.common.core.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.core.response.ItemQueryResultResp;
import com.tencent.supersonic.headless.common.core.response.ItemUseResp;
import com.tencent.supersonic.headless.common.server.enums.QueryType;
import com.tencent.supersonic.headless.common.server.response.ExplainResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.common.core.response.SqlParserResp;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.service.DownloadService;
import com.tencent.supersonic.headless.core.service.HeadlessQueryEngine;
import com.tencent.supersonic.headless.core.service.QueryService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
/**
 * REST endpoints for executing semantic queries (S2SQL, struct, multi-struct),
 * downloading results, explaining queries and fetching usage statistics.
 */
@RestController
@RequestMapping("/api/semantic/query")
@Slf4j
public class QueryController {

    @Autowired
    private QueryService queryService;

    @Autowired
    private HeadlessQueryEngine headlessQueryEngine;

    @Autowired
    private DownloadService downloadService;

    /** Executes an S2SQL query on behalf of the current user. */
    @PostMapping("/sql")
    public Object queryBySql(@RequestBody QueryS2SQLReq queryS2SQLReq,
                             HttpServletRequest request,
                             HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        Object queryBySql = queryService.queryBySql(queryS2SQLReq, user);
        log.info("queryBySql:{}", queryBySql);
        return queryBySql;
    }

    /** Executes a struct query with permission checks applied. */
    @PostMapping("/struct")
    public Object queryByStruct(@RequestBody QueryStructReq queryStructReq,
                                HttpServletRequest request,
                                HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        return queryService.queryByStructWithAuth(queryStructReq, user);
    }

    /** Queries metric data by item id (API-key authenticated via the request). */
    @PostMapping("/metricDataQueryById")
    public ItemQueryResultResp metricDataQueryById(@RequestBody QueryItemReq queryApiReq,
                                                   HttpServletRequest request) throws Exception {
        return queryService.metricDataQueryById(queryApiReq, request);
    }

    /** Streams the result of a struct query as a downloadable file. */
    @PostMapping("/download/struct")
    public void downloadByStruct(@RequestBody DownloadStructReq downloadStructReq,
                                 HttpServletRequest request,
                                 HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        downloadService.downloadByStruct(downloadStructReq, user, response);
    }

    /** Streams a batch of query results as a downloadable file. */
    @PostMapping("/download/batch")
    public void downloadBatch(@RequestBody BatchDownloadReq batchDownloadReq,
                              HttpServletRequest request,
                              HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        downloadService.batchDownload(batchDownloadReq, user, response);
    }

    /** Executes an already-planned QueryStatement directly. */
    @PostMapping("/queryStatement")
    public Object queryStatement(@RequestBody QueryStatement queryStatement) throws Exception {
        return queryService.queryByQueryStatement(queryStatement);
    }

    /** Plans the given parse request and returns the generated physical SQL. */
    @PostMapping("/struct/parse")
    public SqlParserResp parseByStruct(@RequestBody ParseSqlReq parseSqlReq) throws Exception {
        QueryStructReq queryStructCmd = new QueryStructReq();
        QueryStatement queryStatement = headlessQueryEngine.physicalSql(queryStructCmd, parseSqlReq);
        SqlParserResp sqlParserResp = new SqlParserResp();
        BeanUtils.copyProperties(queryStatement, sqlParserResp);
        return sqlParserResp;
    }

    /**
     * queryByMultiStruct
     */
    @PostMapping("/multiStruct")
    public Object queryByMultiStruct(@RequestBody QueryMultiStructReq queryMultiStructReq,
                                     HttpServletRequest request,
                                     HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        return queryService.queryByMultiStruct(queryMultiStructReq, user);
    }

    /**
     * getStatInfo
     * query the used frequency of the metric/dimension
     *
     * @param itemUseReq
     */
    @PostMapping("/stat")
    public List<ItemUseResp> getStatInfo(@RequestBody ItemUseReq itemUseReq) {
        return queryService.getStatInfo(itemUseReq);
    }

    /** Returns the value list of one dimension for the current user. */
    @PostMapping("/queryDimValue")
    public QueryResultWithSchemaResp queryDimValue(@RequestBody QueryDimValueReq queryDimValueReq,
                                                   HttpServletRequest request,
                                                   HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return queryService.queryDimValue(queryDimValueReq, user);
    }

    /**
     * Explains a query (returns the generated SQL/plan) for SQL or STRUCT
     * query types; other types return null, as before.
     */
    @PostMapping("/explain")
    public <T> ExplainResp explain(@RequestBody ExplainSqlReq<T> explainSqlReq,
                                   HttpServletRequest request,
                                   HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        String queryReqJson = JsonUtil.toString(explainSqlReq.getQueryReq());
        QueryType queryTypeEnum = explainSqlReq.getQueryTypeEnum();
        if (QueryType.SQL.equals(queryTypeEnum)) {
            return explainAs(queryReqJson, QueryS2SQLReq.class, queryTypeEnum, user);
        }
        if (QueryType.STRUCT.equals(queryTypeEnum)) {
            return explainAs(queryReqJson, QueryStructReq.class, queryTypeEnum, user);
        }
        return null;
    }

    /**
     * Re-materializes the raw query JSON as the given request type and delegates
     * to the explain service (extracted from the duplicated branches above).
     */
    private <Q> ExplainResp explainAs(String queryReqJson, Class<Q> clazz, QueryType queryTypeEnum, User user)
            throws Exception {
        Q queryReq = JsonUtil.toObject(queryReqJson, clazz);
        ExplainSqlReq<Q> explainSqlReq = ExplainSqlReq.<Q>builder()
                .queryReq(queryReq)
                .queryTypeEnum(queryTypeEnum)
                .build();
        return queryService.explain(explainSqlReq, user);
    }
}

View File

@@ -0,0 +1,75 @@
package com.tencent.supersonic.headless.core.rest;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authentication.utils.UserHolder;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.headless.common.server.request.ModelSchemaFilterReq;
import com.tencent.supersonic.headless.common.server.request.PageDimensionReq;
import com.tencent.supersonic.headless.common.server.request.PageMetricReq;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.DomainResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.core.service.SchemaService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
@RestController
@RequestMapping("/api/semantic/schema")
public class SchemaController {

    // Constructor injection instead of field @Autowired: the dependency is
    // final, visible in tests, and Spring resolves the single constructor
    // without the annotation.
    private final SchemaService schemaService;

    public SchemaController(SchemaService schemaService) {
        this.schemaService = schemaService;
    }

    /**
     * Fetches the schema of the models selected by the filter, scoped to the
     * requesting user.
     */
    @PostMapping
    public List<ModelSchemaResp> fetchModelSchema(@RequestBody ModelSchemaFilterReq filter,
            HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.fetchModelSchema(filter, user);
    }

    /** Lists the domains visible to the requesting user. */
    @GetMapping("/domain/list")
    public List<DomainResp> getDomainList(HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.getDomainList(user);
    }

    /**
     * Lists the models under a domain that the user can access with the given
     * auth type.
     *
     * @param authType must match an {@link AuthType} enum constant name;
     *                 {@code AuthType.valueOf} throws IllegalArgumentException otherwise
     */
    @GetMapping("/model/list")
    public List<ModelResp> getModelList(@RequestParam("domainId") Long domainId,
            @RequestParam("authType") String authType,
            HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.getModelList(user, AuthType.valueOf(authType), domainId);
    }

    /** Pages through dimensions matching the request criteria. */
    @PostMapping("/dimension/page")
    public PageInfo<DimensionResp> queryDimension(@RequestBody PageDimensionReq pageDimensionCmd,
            HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.queryDimension(pageDimensionCmd, user);
    }

    /** Pages through metrics matching the request criteria. */
    @PostMapping("/metric/page")
    public PageInfo<MetricResp> queryMetric(@RequestBody PageMetricReq pageMetricCmd,
            HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.queryMetric(pageMetricCmd, user);
    }
}

View File

@@ -0,0 +1,14 @@
package com.tencent.supersonic.headless.core.service;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.headless.common.core.request.BatchDownloadReq;
import com.tencent.supersonic.headless.common.core.request.DownloadStructReq;
import javax.servlet.http.HttpServletResponse;
/**
 * Exports query results as downloadable files written to the HTTP response.
 */
public interface DownloadService {

    /**
     * Runs the struct query described by the request and streams the result
     * to the response as a file download.
     */
    void downloadByStruct(DownloadStructReq downloadStructReq,
            User user, HttpServletResponse response) throws Exception;

    /**
     * Downloads data for a batch of metrics (ids carried in the request) in a
     * single file written to the response.
     */
    void batchDownload(BatchDownloadReq batchDownloadReq, User user, HttpServletResponse response) throws Exception;
}

View File

@@ -0,0 +1,21 @@
package com.tencent.supersonic.headless.core.service;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
/**
 * Query engine facade: plans a {@link QueryStatement}, routes it to a capable
 * {@link QueryExecutor}, and executes it.
 */
public interface HeadlessQueryEngine {

    /** Plans the statement (parsing/optimization) into an executable form. */
    QueryStatement plan(QueryStatement queryStatement) throws Exception;

    /** Picks the executor that accepts this statement; may return null if none does. */
    QueryExecutor route(QueryStatement queryStatement);

    /** Executes the (already planned) statement and returns result rows plus schema. */
    QueryResultWithSchemaResp execute(QueryStatement queryStatement);

    /** Builds the physical SQL statement for a struct query expressed as parsed SQL. */
    QueryStatement physicalSql(QueryStructReq queryStructCmd, ParseSqlReq sqlCommend) throws Exception;

    /** Builds the physical SQL statement for a struct query expressed as a metric request. */
    QueryStatement physicalSql(QueryStructReq queryStructCmd, MetricReq sqlCommend) throws Exception;
}

View File

@@ -0,0 +1,47 @@
package com.tencent.supersonic.headless.core.service;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.headless.common.core.request.QueryItemReq;
import com.tencent.supersonic.headless.common.core.response.ItemQueryResultResp;
import com.tencent.supersonic.headless.common.server.response.ExplainResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.common.core.request.ExplainSqlReq;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.QueryDimValueReq;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.core.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.core.response.ItemUseResp;
import com.tencent.supersonic.headless.core.annotation.ApiHeaderCheck;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
 * Entry point for running headless queries (S2SQL, struct, multi-struct,
 * dimension-value) and for query explanation and usage statistics.
 */
public interface QueryService {

    /** Executes an S2SQL query on behalf of the user. */
    Object queryBySql(QueryS2SQLReq querySqlCmd, User user) throws Exception;

    /** Executes a struct query on behalf of the user. */
    QueryResultWithSchemaResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception;

    /** Struct query variant annotated for data-permission enforcement. */
    QueryResultWithSchemaResp queryByStructWithAuth(QueryStructReq queryStructCmd, User user)
            throws Exception;

    /** Executes several struct queries combined into one request. */
    QueryResultWithSchemaResp queryByMultiStruct(QueryMultiStructReq queryMultiStructCmd, User user) throws Exception;

    /** Queries the values of a single dimension. */
    QueryResultWithSchemaResp queryDimValue(QueryDimValueReq queryDimValueReq, User user);

    /** Executes a pre-built, already-planned statement directly. */
    Object queryByQueryStatement(QueryStatement queryStatement);

    /** Returns usage statistics for the items named in the request. */
    List<ItemUseResp> getStatInfo(ItemUseReq itemUseCommend);

    /** Explains (without executing) the query carried in the request. */
    <T> ExplainResp explain(ExplainSqlReq<T> explainSqlReq, User user) throws Exception;

    /**
     * Queries metric data by item id for the open API; {@link ApiHeaderCheck}
     * marks it for API header validation (applied by an aspect).
     */
    @ApiHeaderCheck
    ItemQueryResultResp metricDataQueryById(QueryItemReq queryApiReq,
            HttpServletRequest request) throws Exception;

    /** Parses a metric request into an executable statement. */
    QueryStatement parseMetricReq(MetricReq metricReq) throws Exception;
}

View File

@@ -0,0 +1,28 @@
package com.tencent.supersonic.headless.core.service;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.headless.common.server.request.ModelSchemaFilterReq;
import com.tencent.supersonic.headless.common.server.request.PageDimensionReq;
import com.tencent.supersonic.headless.common.server.request.PageMetricReq;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.DomainResp;
import java.util.List;
/**
 * Read-side access to semantic schema metadata (domains, models, dimensions,
 * metrics), scoped by the requesting user.
 */
public interface SchemaService {

    /** Fetches full model schemas matching the filter, visible to the user. */
    List<ModelSchemaResp> fetchModelSchema(ModelSchemaFilterReq filter, User user);

    /** Pages through dimensions matching the request. */
    PageInfo<DimensionResp> queryDimension(PageDimensionReq pageDimensionReq, User user);

    /** Pages through metrics matching the request. */
    PageInfo<MetricResp> queryMetric(PageMetricReq pageMetricReq, User user);

    /** Lists domains visible to the user. */
    List<DomainResp> getDomainList(User user);

    /** Lists models in a domain that the user can access with the given auth type. */
    List<ModelResp> getModelList(User user, AuthType authType, Long domainId);
}

View File

@@ -0,0 +1,306 @@
package com.tencent.supersonic.headless.core.service.impl;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.ExcelWriter;
import com.alibaba.excel.util.FileUtils;
import com.alibaba.excel.write.metadata.WriteSheet;
import com.google.common.collect.Lists;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.core.persistence.pojo.DataDownload;
import com.tencent.supersonic.headless.common.core.request.BatchDownloadReq;
import com.tencent.supersonic.headless.common.core.request.DownloadStructReq;
import com.tencent.supersonic.headless.common.server.enums.SemanticType;
import com.tencent.supersonic.headless.common.server.request.ModelSchemaFilterReq;
import com.tencent.supersonic.headless.common.server.response.DimSchemaResp;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.MetricSchemaResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.service.QueryService;
import com.tencent.supersonic.headless.core.utils.DataTransformUtils;
import com.tencent.supersonic.headless.core.service.DownloadService;
import com.tencent.supersonic.headless.server.service.ModelService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Service;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URLEncoder;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@Slf4j
@Service
public class DownloadServiceImpl implements DownloadService {

    /** Header of the extra column that tags each row with its metric name. */
    private static final String internMetricCol = "指标名称";

    /** Row limit applied to every download query. */
    private static final long downloadSize = 10000;

    private final ModelService modelService;

    private final QueryService queryService;

    public DownloadServiceImpl(ModelService modelService, QueryService queryService) {
        this.modelService = modelService;
        this.queryService = queryService;
    }

    /**
     * Runs a struct query (with permission checks) and streams the result to
     * the response as a single-sheet Excel file. On query failure the error
     * message is written into the workbook and nothing is sent to the client.
     */
    @Override
    public void downloadByStruct(DownloadStructReq downloadStructReq,
            User user, HttpServletResponse response) throws Exception {
        String fileName = String.format("%s_%s.xlsx", "supersonic", DateUtils.format(new Date(), DateUtils.FORMAT));
        File file = FileUtils.createTmpFile(fileName);
        try {
            QueryResultWithSchemaResp queryResult = queryService.queryByStructWithAuth(downloadStructReq, user);
            DataDownload dataDownload = buildDataDownload(queryResult, downloadStructReq);
            EasyExcel.write(file).sheet("Sheet1").head(dataDownload.getHeaders()).doWrite(dataDownload.getData());
        } catch (RuntimeException e) {
            // NOTE(review): on failure the error workbook is written but never
            // sent (early return), unlike batchDownload — confirm intentional.
            EasyExcel.write(file).sheet("Sheet1").head(buildErrMessageHead())
                    .doWrite(buildErrMessageData(e.getMessage()));
            return;
        }
        downloadFile(response, file, fileName);
    }

    /**
     * Downloads data for a batch of metrics into one workbook and streams it
     * to the response. A request without metric ids is a no-op.
     */
    @Override
    public void batchDownload(BatchDownloadReq batchDownloadReq, User user,
            HttpServletResponse response) throws Exception {
        String fileName = String.format("%s_%s.xlsx", "supersonic", DateUtils.format(new Date(), DateUtils.FORMAT));
        File file = FileUtils.createTmpFile(fileName);
        List<Long> metricIds = batchDownloadReq.getMetricIds();
        if (CollectionUtils.isEmpty(metricIds)) {
            return;
        }
        batchDownload(batchDownloadReq, user, file);
        downloadFile(response, file, fileName);
    }

    /**
     * Writes one sheet per group of metrics (grouped by their related-dimension
     * key) into the given file. Each metric's data is queried with its
     * drill-down dimensions plus the time dimension.
     */
    public void batchDownload(BatchDownloadReq batchDownloadReq, User user, File file) throws Exception {
        List<Long> metricIds = batchDownloadReq.getMetricIds();
        List<ModelSchemaResp> modelSchemaRespList = modelService.fetchModelSchema(new ModelSchemaFilterReq());
        Map<String, List<MetricSchemaResp>> metricSchemaMap = getMetricSchemaMap(modelSchemaRespList, metricIds);
        Map<Long, DimSchemaResp> dimensionRespMap = getDimensionMap(modelSchemaRespList);
        ExcelWriter excelWriter = EasyExcel.write(file).build();
        int sheetCount = 1;
        for (List<MetricSchemaResp> metrics : metricSchemaMap.values()) {
            if (CollectionUtils.isEmpty(metrics)) {
                continue;
            }
            // All metrics in a group share the same related dimensions, so the
            // drill-down dimension list is derived from the first one.
            MetricSchemaResp metricSchemaResp = metrics.get(0);
            List<DimSchemaResp> dimensions = getMetricRelaDimensions(metricSchemaResp, dimensionRespMap);
            for (MetricSchemaResp metric : metrics) {
                try {
                    DownloadStructReq downloadStructReq = buildDownloadStructReq(dimensions, metric, batchDownloadReq);
                    QueryResultWithSchemaResp queryResult = queryService.queryByStructWithAuth(downloadStructReq, user);
                    DataDownload dataDownload = buildDataDownload(queryResult, downloadStructReq);
                    WriteSheet writeSheet = EasyExcel.writerSheet("Sheet" + sheetCount)
                            .head(dataDownload.getHeaders()).build();
                    excelWriter.write(dataDownload.getData(), writeSheet);
                } catch (RuntimeException e) {
                    // NOTE(review): excelWriter is abandoned without finish() on
                    // this path and the error workbook replaces anything written
                    // so far — confirm this is the intended failure behavior.
                    EasyExcel.write(file).sheet("Sheet1").head(buildErrMessageHead())
                            .doWrite(buildErrMessageData(e.getMessage()));
                    return;
                }
            }
            sheetCount++;
        }
        excelWriter.finish();
    }

    /** Single-column header used for error workbooks. */
    private List<List<String>> buildErrMessageHead() {
        List<List<String>> headers = Lists.newArrayList();
        headers.add(Lists.newArrayList("异常提示"));
        return headers;
    }

    /** Single-cell data row carrying the error message. */
    private List<List<String>> buildErrMessageData(String errMsg) {
        List<List<String>> data = Lists.newArrayList();
        data.add(Lists.newArrayList(errMsg));
        return data;
    }

    /** One header column per result column, in result order. */
    private List<List<String>> buildHeader(QueryResultWithSchemaResp queryResultWithSchemaResp) {
        List<List<String>> header = Lists.newArrayList();
        for (QueryColumn column : queryResultWithSchemaResp.getColumns()) {
            header.add(Lists.newArrayList(column.getName()));
        }
        return header;
    }

    /**
     * Header for the transformed (pivoted) layout: non-date dimension columns,
     * then one column per date, then the metric-name column.
     */
    private List<List<String>> buildHeader(List<QueryColumn> queryColumns, List<String> dateList) {
        List<List<String>> headers = Lists.newArrayList();
        for (QueryColumn queryColumn : queryColumns) {
            // Date columns are expanded separately from dateList below.
            if (SemanticType.DATE.name().equals(queryColumn.getShowType())) {
                continue;
            }
            headers.add(Lists.newArrayList(queryColumn.getName()));
        }
        for (String date : dateList) {
            headers.add(Lists.newArrayList(date));
        }
        headers.add(Lists.newArrayList(internMetricCol));
        return headers;
    }

    /** Flattens the raw result rows into string cells, column order preserved. */
    private List<List<String>> buildData(QueryResultWithSchemaResp queryResultWithSchemaResp) {
        List<List<String>> data = new ArrayList<>();
        for (Map<String, Object> row : queryResultWithSchemaResp.getResultList()) {
            List<String> rowData = new ArrayList<>();
            for (QueryColumn column : queryResultWithSchemaResp.getColumns()) {
                rowData.add(String.valueOf(row.get(column.getNameEn())));
            }
            data.add(rowData);
        }
        return data;
    }

    /**
     * Builds rows for the transformed layout: values are looked up via the
     * display-name to field-name map, and the metric name is appended last.
     */
    private List<List<String>> buildData(List<List<String>> headers, Map<String, String> nameMap,
            List<Map<String, Object>> dataTransformed, String metricName) {
        List<List<String>> data = Lists.newArrayList();
        for (Map<String, Object> map : dataTransformed) {
            List<String> row = Lists.newArrayList();
            for (List<String> header : headers) {
                String head = header.get(0);
                // The metric-name column is filled once at the end of the row.
                if (internMetricCol.equals(head)) {
                    continue;
                }
                Object object = map.getOrDefault(nameMap.getOrDefault(head, head), "");
                if (object == null) {
                    row.add("");
                } else {
                    row.add(String.valueOf(object));
                }
            }
            row.add(metricName);
            data.add(row);
        }
        return data;
    }

    /**
     * Chooses between the pivoted layout (when the request asks for transform
     * and a metric column exists) and the plain column dump.
     */
    private DataDownload buildDataDownload(QueryResultWithSchemaResp queryResult, DownloadStructReq downloadStructReq) {
        List<QueryColumn> metricColumns = queryResult.getMetricColumns();
        List<QueryColumn> dimensionColumns = queryResult.getDimensionColumns();
        if (downloadStructReq.isTransform() && !CollectionUtils.isEmpty(metricColumns)) {
            QueryColumn metric = metricColumns.get(0);
            List<String> groups = downloadStructReq.getGroups();
            List<Map<String, Object>> dataTransformed = DataTransformUtils.transform(queryResult.getResultList(),
                    metric.getNameEn(), groups, downloadStructReq.getDateInfo());
            List<List<String>> headers = buildHeader(dimensionColumns, downloadStructReq.getDateInfo().getDateList());
            List<List<String>> data = buildData(headers, getDimensionNameMap(dimensionColumns),
                    dataTransformed, metric.getName());
            return DataDownload.builder().headers(headers).data(data).build();
        } else {
            List<List<String>> data = buildData(queryResult);
            List<List<String>> header = buildHeader(queryResult);
            return DataDownload.builder().data(data).headers(header).build();
        }
    }

    /**
     * Builds the struct query for one metric: group by its drill-down
     * dimensions plus the time dimension, aggregate the metric, and cap the
     * result at {@code downloadSize} rows.
     */
    private DownloadStructReq buildDownloadStructReq(List<DimSchemaResp> dimensionResps, MetricResp metricResp,
            BatchDownloadReq batchDownloadReq) {
        DateConf dateConf = batchDownloadReq.getDateInfo();
        Set<Long> modelIds = dimensionResps.stream().map(DimSchemaResp::getModelId).collect(Collectors.toSet());
        modelIds.add(metricResp.getModelId());
        DownloadStructReq downloadStructReq = new DownloadStructReq();
        downloadStructReq.setGroups(dimensionResps.stream()
                .map(DimSchemaResp::getBizName).collect(Collectors.toList()));
        // Time dimension leads the group-by list.
        downloadStructReq.getGroups().add(0, getTimeDimension(dateConf));
        Aggregator aggregator = new Aggregator();
        aggregator.setColumn(metricResp.getBizName());
        downloadStructReq.setAggregators(Lists.newArrayList(aggregator));
        downloadStructReq.setDateInfo(dateConf);
        downloadStructReq.setModelIds(modelIds);
        downloadStructReq.setLimit(downloadSize);
        downloadStructReq.setIsTransform(batchDownloadReq.isTransform());
        return downloadStructReq;
    }

    /** Maps the request's period (month/week/day) to the matching time dimension name. */
    private String getTimeDimension(DateConf dateConf) {
        if (Constants.MONTH.equals(dateConf.getPeriod())) {
            return TimeDimensionEnum.MONTH.getName();
        } else if (Constants.WEEK.equals(dateConf.getPeriod())) {
            return TimeDimensionEnum.WEEK.getName();
        } else {
            return TimeDimensionEnum.DAY.getName();
        }
    }

    /** Groups the requested metrics by their related-dimension key. */
    private Map<String, List<MetricSchemaResp>> getMetricSchemaMap(List<ModelSchemaResp> modelSchemaRespList,
            List<Long> metricIds) {
        return modelSchemaRespList.stream().flatMap(modelSchemaResp
                        -> modelSchemaResp.getMetrics().stream())
                .filter(metricSchemaResp -> metricIds.contains(metricSchemaResp.getId()))
                .collect(Collectors.groupingBy(MetricSchemaResp::getRelaDimensionIdKey));
    }

    /** Indexes all dimensions across the given models by id. */
    private Map<Long, DimSchemaResp> getDimensionMap(List<ModelSchemaResp> modelSchemaRespList) {
        return modelSchemaRespList.stream().flatMap(modelSchemaResp
                        -> modelSchemaResp.getDimensions().stream())
                .collect(Collectors.toMap(DimensionResp::getId, dimensionResp -> dimensionResp));
    }

    /**
     * Maps dimension display names to their field names.
     * NOTE(review): Collectors.toMap throws on duplicate display names — confirm
     * names are unique within a result.
     */
    private Map<String, String> getDimensionNameMap(List<QueryColumn> queryColumns) {
        return queryColumns.stream().collect(Collectors.toMap(QueryColumn::getName, QueryColumn::getNameEn));
    }

    /** Resolves a metric's drill-down dimensions; missing ids are silently dropped. */
    private List<DimSchemaResp> getMetricRelaDimensions(MetricSchemaResp metricSchemaResp,
            Map<Long, DimSchemaResp> dimensionRespMap) {
        if (metricSchemaResp.getRelateDimension() == null
                || CollectionUtils.isEmpty(metricSchemaResp.getRelateDimension().getDrillDownDimensions())) {
            return Lists.newArrayList();
        }
        return metricSchemaResp.getRelateDimension().getDrillDownDimensions()
                .stream().map(drillDownDimension -> dimensionRespMap.get(drillDownDimension.getDimensionId()))
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
    }

    /** Streams the file to the client as an attachment; failures are logged, not thrown. */
    private void downloadFile(HttpServletResponse response, File file, String filename) {
        try {
            byte[] buffer = readFileToByteArray(file);
            response.reset();
            response.setCharacterEncoding("UTF-8");
            response.addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(filename, "UTF-8"));
            response.addHeader("Content-Length", "" + file.length());
            try (OutputStream outputStream = new BufferedOutputStream(response.getOutputStream())) {
                response.setContentType("application/octet-stream");
                outputStream.write(buffer);
                outputStream.flush();
            }
        } catch (Exception e) {
            log.error("failed to download file", e);
        }
    }

    /** Reads the whole file into memory. */
    private byte[] readFileToByteArray(File file) throws IOException {
        // Files.readAllBytes guarantees the full content is read; the previous
        // InputStream.available()/single-read() approach could silently return
        // a partially filled buffer.
        return Files.readAllBytes(file.toPath());
    }
}

View File

@@ -0,0 +1,89 @@
package com.tencent.supersonic.headless.core.service.impl;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.parser.QueryParser;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.service.HeadlessQueryEngine;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.core.utils.QueryUtils;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Slf4j
@Component
public class HeadlessQueryEngineImpl implements HeadlessQueryEngine {

    private final QueryParser queryParser;
    private final Catalog catalog;
    private final QueryUtils queryUtils;

    public HeadlessQueryEngineImpl(QueryParser queryParser, Catalog catalog,
            QueryUtils queryUtils) {
        this.queryParser = queryParser;
        this.catalog = catalog;
        this.queryUtils = queryUtils;
    }

    /**
     * Routes the statement to the first accepting executor and runs it; fills
     * item-name info when model ids are present. Returns null when no executor
     * accepts the statement.
     */
    @Override
    public QueryResultWithSchemaResp execute(QueryStatement queryStatement) {
        QueryResultWithSchemaResp queryResultWithColumns = null;
        QueryExecutor queryExecutor = route(queryStatement);
        if (queryExecutor != null) {
            queryResultWithColumns = queryExecutor.execute(catalog, queryStatement);
            queryResultWithColumns.setSql(queryStatement.getSql());
            if (!CollectionUtils.isEmpty(queryStatement.getModelIds())) {
                queryUtils.fillItemNameInfo(queryResultWithColumns, queryStatement.getModelIds());
            }
        }
        return queryResultWithColumns;
    }

    /**
     * Plans the statement: parses it into logical SQL, validates the parse,
     * then applies all registered query optimizers.
     */
    @Override
    public QueryStatement plan(QueryStatement queryStatement) throws Exception {
        queryStatement.setEnableOptimize(queryUtils.enableOptimize());
        queryStatement = queryParser.logicSql(queryStatement);
        queryUtils.checkSqlParse(queryStatement);
        queryStatement.setModelIds(queryStatement.getQueryStructReq().getModelIds());
        log.info("queryStatement:{}", queryStatement);
        return optimize(queryStatement.getQueryStructReq(), queryStatement);
    }

    /** Applies every registered {@link QueryOptimizer} to the statement in order. */
    public QueryStatement optimize(QueryStructReq queryStructCmd, QueryStatement queryStatement) {
        for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
            queryOptimizer.rewrite(queryStructCmd, queryStatement);
        }
        return queryStatement;
    }

    /** Returns the first executor that accepts the statement, or null if none does. */
    @Override
    public QueryExecutor route(QueryStatement queryStatement) {
        for (QueryExecutor queryExecutor : ComponentFactory.getQueryExecutors()) {
            if (queryExecutor.accept(queryStatement)) {
                return queryExecutor;
            }
        }
        return null;
    }

    /** Builds the physical SQL for a parsed-SQL (S2SQL) request and optimizes it. */
    @Override
    public QueryStatement physicalSql(QueryStructReq queryStructCmd, ParseSqlReq sqlCommend) throws Exception {
        QueryStatement queryStatement = new QueryStatement();
        queryStatement.setQueryStructReq(queryStructCmd);
        queryStatement.setParseSqlReq(sqlCommend);
        queryStatement.setIsS2SQL(true);
        return optimize(queryStructCmd, queryParser.parser(sqlCommend, queryStatement));
    }

    /** Builds the physical SQL for a metric request (no optimizer pass here). */
    @Override
    public QueryStatement physicalSql(QueryStructReq queryStructCmd, MetricReq metricCommand) throws Exception {
        QueryStatement queryStatement = new QueryStatement();
        queryStatement.setQueryStructReq(queryStructCmd);
        queryStatement.setMetricReq(metricCommand);
        queryStatement.setIsS2SQL(false);
        return queryParser.parser(queryStatement);
    }
}

View File

@@ -0,0 +1,428 @@
package com.tencent.supersonic.headless.core.service.impl;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.enums.ApiItemType;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.pojo.exception.InvalidArgumentException;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.common.util.cache.CacheUtils;
import com.tencent.supersonic.headless.common.core.pojo.SingleItemQueryResult;
import com.tencent.supersonic.headless.common.core.pojo.Cache;
import com.tencent.supersonic.headless.common.core.request.ExplainSqlReq;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.common.core.request.QueryItemReq;
import com.tencent.supersonic.headless.common.core.request.QueryDimValueReq;
import com.tencent.supersonic.headless.common.core.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.core.response.ItemQueryResultResp;
import com.tencent.supersonic.headless.common.core.response.ItemUseResp;
import com.tencent.supersonic.headless.common.server.enums.QueryType;
import com.tencent.supersonic.headless.common.server.pojo.Dim;
import com.tencent.supersonic.headless.common.server.pojo.Item;
import com.tencent.supersonic.headless.common.server.request.ModelSchemaFilterReq;
import com.tencent.supersonic.headless.common.server.response.AppDetailResp;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.ExplainResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.annotation.ApiHeaderCheck;
import com.tencent.supersonic.headless.core.annotation.S2SQLDataPermission;
import com.tencent.supersonic.headless.core.annotation.StructDataPermission;
import com.tencent.supersonic.headless.core.aspect.ApiHeaderCheckAspect;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.parser.convert.QueryReqConverter;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.service.HeadlessQueryEngine;
import com.tencent.supersonic.headless.core.service.QueryService;
import com.tencent.supersonic.headless.core.service.SchemaService;
import com.tencent.supersonic.headless.core.utils.QueryUtils;
import com.tencent.supersonic.headless.core.utils.StatUtils;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
import com.tencent.supersonic.headless.server.service.AppService;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@Service
@Slf4j
public class QueryServiceImpl implements QueryService {
    // Caches item-usage statistics for one day, keyed by the serialized request.
    protected final com.google.common.cache.Cache<String, List<ItemUseResp>> itemUseCache =
            CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.DAYS).build();

    private final StatUtils statUtils;
    private final CacheUtils cacheUtils;
    private final QueryUtils queryUtils;
    private final QueryReqConverter queryReqConverter;
    private final Catalog catalog;
    private final AppService appService;

    // Global result-cache switch (default on); individual requests can still
    // carry their own cache settings.
    @Value("${query.cache.enable:true}")
    private Boolean cacheEnable;

    private final HeadlessQueryEngine headlessQueryEngine;

    public QueryServiceImpl(
            StatUtils statUtils,
            CacheUtils cacheUtils,
            QueryUtils queryUtils,
            QueryReqConverter queryReqConverter,
            HeadlessQueryEngine headlessQueryEngine,
            Catalog catalog,
            AppService appService) {
        this.statUtils = statUtils;
        this.cacheUtils = cacheUtils;
        this.queryUtils = queryUtils;
        this.queryReqConverter = queryReqConverter;
        this.headlessQueryEngine = headlessQueryEngine;
        this.catalog = catalog;
        this.appService = appService;
    }
    /**
     * Executes an S2SQL query: records stat info, converts the request into a
     * QueryStatement, runs it through the engine, and marks the stat record
     * as successful. Annotated for data-permission enforcement.
     */
    @Override
    @S2SQLDataPermission
    @SneakyThrows
    public Object queryBySql(QueryS2SQLReq queryS2SQLReq, User user) {
        statUtils.initStatInfo(queryS2SQLReq, user);
        QueryStatement queryStatement = new QueryStatement();
        try {
            queryStatement = convertToQueryStatement(queryS2SQLReq, user);
        } catch (Exception e) {
            // NOTE(review): conversion failures are swallowed here and an empty
            // QueryStatement is executed anyway — confirm this is intentional.
            log.info("convertToQueryStatement has a exception:", e);
        }
        log.info("queryStatement:{}", queryStatement);
        QueryResultWithSchemaResp results = headlessQueryEngine.execute(queryStatement);
        statUtils.statInfo2DbAsync(TaskStatusEnum.SUCCESS);
        return results;
    }
public Object queryByQueryStatement(QueryStatement queryStatement) {
return headlessQueryEngine.execute(queryStatement);
}
private QueryStatement convertToQueryStatement(QueryS2SQLReq querySqlCmd, User user) throws Exception {
ModelSchemaFilterReq filter = new ModelSchemaFilterReq();
filter.setModelIds(querySqlCmd.getModelIds());
SchemaService schemaService = ContextUtils.getBean(SchemaService.class);
List<ModelSchemaResp> modelSchemaResps = schemaService.fetchModelSchema(filter, user);
QueryStatement queryStatement = queryReqConverter.convert(querySqlCmd, modelSchemaResps);
queryStatement.setModelIds(querySqlCmd.getModelIds());
return queryStatement;
}
    /**
     * Executes a struct query with an optional result-cache fast path:
     * a cache hit returns immediately; otherwise the statement is planned,
     * routed, executed, and (when caching applies) the result is cached.
     * Stat info is recorded as SUCCESS/ERROR on each exit path.
     */
    @Override
    public QueryResultWithSchemaResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception {
        QueryResultWithSchemaResp queryResultWithColumns = null;
        log.info("[queryStructCmd:{}]", queryStructCmd);
        try {
            statUtils.initStatInfo(queryStructCmd, user);
            // Cache key combines the model ids with an MD5 of the command.
            String cacheKey = cacheUtils.generateCacheKey(getKeyByModelIds(queryStructCmd.getModelIds()),
                    queryStructCmd.generateCommandMd5());
            // Force caching off on the request if the global switch is disabled.
            handleGlobalCacheDisable(queryStructCmd);
            boolean isCache = isCache(queryStructCmd);
            if (isCache) {
                queryResultWithColumns = queryByCache(cacheKey, queryStructCmd);
                if (queryResultWithColumns != null) {
                    statUtils.statInfo2DbAsync(TaskStatusEnum.SUCCESS);
                    return queryResultWithColumns;
                }
            }
            // Cache miss: mark the stat record accordingly and run the query.
            StatUtils.get().setUseResultCache(false);
            QueryStatement queryStatement = new QueryStatement();
            queryStatement.setQueryStructReq(queryStructCmd);
            queryStatement.setIsS2SQL(false);
            queryStatement = headlessQueryEngine.plan(queryStatement);
            QueryExecutor queryExecutor = headlessQueryEngine.route(queryStatement);
            if (queryExecutor != null) {
                queryResultWithColumns = headlessQueryEngine.execute(queryStatement);
                if (isCache) {
                    // if queryResultWithColumns is not null, update cache data
                    queryUtils.cacheResultLogic(cacheKey, queryResultWithColumns);
                }
            }
            statUtils.statInfo2DbAsync(TaskStatusEnum.SUCCESS);
            return queryResultWithColumns;
        } catch (Exception e) {
            log.warn("exception in queryByStruct, e: ", e);
            statUtils.statInfo2DbAsync(TaskStatusEnum.ERROR);
            throw e;
        }
    }
    /**
     * Same behavior as {@link #queryByStruct}, but annotated with
     * {@link StructDataPermission} so data-permission checks are applied
     * (presumably by an aspect elsewhere in the module).
     */
    @Override
    @StructDataPermission
    @SneakyThrows
    public QueryResultWithSchemaResp queryByStructWithAuth(QueryStructReq queryStructCmd, User user) {
        return queryByStruct(queryStructCmd, user);
    }
    /**
     * Executes several struct queries combined into one request: tries the
     * result cache first (keyed from the first sub-request), otherwise plans
     * each sub-query, unions them into one statement, executes it, and fills
     * in item names. Stat info is recorded on each exit path.
     */
    @Override
    public QueryResultWithSchemaResp queryByMultiStruct(QueryMultiStructReq queryMultiStructReq, User user)
            throws Exception {
        // Stat info and cache key are derived from the first sub-request.
        statUtils.initStatInfo(queryMultiStructReq.getQueryStructReqs().get(0), user);
        String cacheKey = cacheUtils.generateCacheKey(
                getKeyByModelIds(queryMultiStructReq.getQueryStructReqs().get(0).getModelIds()),
                queryMultiStructReq.generateCommandMd5());
        boolean isCache = isCache(queryMultiStructReq);
        QueryResultWithSchemaResp queryResultWithColumns;
        if (isCache) {
            queryResultWithColumns = queryByCache(cacheKey, queryMultiStructReq);
            if (queryResultWithColumns != null) {
                statUtils.statInfo2DbAsync(TaskStatusEnum.SUCCESS);
                return queryResultWithColumns;
            }
        }
        log.info("stat queryByStructWithoutCache, queryMultiStructReq:{}", queryMultiStructReq);
        try {
            QueryStatement sqlParser = getQueryStatementByMultiStruct(queryMultiStructReq);
            queryResultWithColumns = headlessQueryEngine.execute(sqlParser);
            if (queryResultWithColumns != null) {
                statUtils.statInfo2DbAsync(TaskStatusEnum.SUCCESS);
                queryUtils.fillItemNameInfo(queryResultWithColumns, queryMultiStructReq);
            }
            return queryResultWithColumns;
        } catch (Exception e) {
            log.warn("exception in queryByMultiStruct, e: ", e);
            statUtils.statInfo2DbAsync(TaskStatusEnum.ERROR);
            throw e;
        }
    }
private QueryStatement getQueryStatementByMultiStruct(QueryMultiStructReq queryMultiStructReq) throws Exception {
List<QueryStatement> sqlParsers = new ArrayList<>();
for (QueryStructReq queryStructCmd : queryMultiStructReq.getQueryStructReqs()) {
QueryStatement queryStatement = new QueryStatement();
queryStatement.setQueryStructReq(queryStructCmd);
queryStatement.setIsS2SQL(false);
queryStatement = headlessQueryEngine.plan(queryStatement);
queryUtils.checkSqlParse(queryStatement);
sqlParsers.add(queryStatement);
}
log.info("multi sqlParser:{}", sqlParsers);
return queryUtils.sqlParserUnion(queryMultiStructReq, sqlParsers);
}
    /**
     * Queries the distinct values of a dimension by generating a
     * "select distinct ..." S2SQL statement and executing it through
     * {@code queryBySql}.
     *
     * @param queryDimValueReq identifies the model, dimension and date range
     * @param user             requesting user
     * @return the distinct dimension values as a schema-bearing result
     */
    @Override
    @SneakyThrows
    public QueryResultWithSchemaResp queryDimValue(QueryDimValueReq queryDimValueReq, User user) {
        QueryS2SQLReq queryS2SQLReq = generateDimValueQuerySql(queryDimValueReq);
        return (QueryResultWithSchemaResp) queryBySql(queryS2SQLReq, user);
    }
private void handleGlobalCacheDisable(QueryStructReq queryStructCmd) {
if (!cacheEnable) {
Cache cacheInfo = new Cache();
cacheInfo.setCache(false);
queryStructCmd.setCacheInfo(cacheInfo);
}
}
@Override
@SneakyThrows
public List<ItemUseResp> getStatInfo(ItemUseReq itemUseReq) {
if (itemUseReq.getCacheEnable()) {
return itemUseCache.get(JsonUtil.toString(itemUseReq), () -> {
List<ItemUseResp> data = statUtils.getStatInfo(itemUseReq);
itemUseCache.put(JsonUtil.toString(itemUseReq), data);
return data;
});
}
return statUtils.getStatInfo(itemUseReq);
}
    /**
     * Produces the physical SQL that would be executed for a request without
     * running it. Supports three request shapes: S2SQL, single struct and
     * multi struct.
     *
     * @param explainSqlReq carries the query type and the typed request payload
     * @param user          requesting user (used for S2SQL conversion)
     * @return the generated SQL wrapped in an {@link ExplainResp}
     * @throws IllegalArgumentException when the type/payload combination is unsupported
     */
    @Override
    public <T> ExplainResp explain(ExplainSqlReq<T> explainSqlReq, User user) throws Exception {
        QueryType queryTypeEnum = explainSqlReq.getQueryTypeEnum();
        T queryReq = explainSqlReq.getQueryReq();
        if (QueryType.SQL.equals(queryTypeEnum) && queryReq instanceof QueryS2SQLReq) {
            QueryStatement queryStatement = convertToQueryStatement((QueryS2SQLReq) queryReq, user);
            return getExplainResp(queryStatement);
        }
        // NOTE(review): if QueryMultiStructReq were ever a subtype of QueryStructReq,
        // this branch would shadow the multi-struct branch below — confirm the two
        // request types are unrelated.
        if (QueryType.STRUCT.equals(queryTypeEnum) && queryReq instanceof QueryStructReq) {
            QueryStatement queryStatement = new QueryStatement();
            queryStatement.setQueryStructReq((QueryStructReq) queryReq);
            queryStatement.setIsS2SQL(false);
            queryStatement = headlessQueryEngine.plan(queryStatement);
            return getExplainResp(queryStatement);
        }
        if (QueryType.STRUCT.equals(queryTypeEnum) && queryReq instanceof QueryMultiStructReq) {
            QueryMultiStructReq queryMultiStructReq = (QueryMultiStructReq) queryReq;
            QueryStatement queryStatement = getQueryStatementByMultiStruct(queryMultiStructReq);
            return getExplainResp(queryStatement);
        }
        throw new IllegalArgumentException("Parameters are invalid, explainSqlReq: " + explainSqlReq);
    }
@Override
@ApiHeaderCheck
public ItemQueryResultResp metricDataQueryById(QueryItemReq queryItemReq,
HttpServletRequest request) throws Exception {
AppDetailResp appDetailResp = getAppDetailResp(request);
authCheck(appDetailResp, queryItemReq.getIds(), ApiItemType.METRIC);
List<SingleItemQueryResult> results = Lists.newArrayList();
Map<Long, Item> map = appDetailResp.getConfig().getItems().stream()
.collect(Collectors.toMap(Item::getId, i -> i));
for (Long id : queryItemReq.getIds()) {
Item item = map.get(id);
SingleItemQueryResult apiQuerySingleResult = dataQuery(appDetailResp.getId(),
item, queryItemReq.getDateConf(), queryItemReq.getLimit());
results.add(apiQuerySingleResult);
}
return ItemQueryResultResp.builder().results(results).build();
}
private SingleItemQueryResult dataQuery(Integer appId, Item item, DateConf dateConf, Long limit) throws Exception {
MetricResp metricResp = catalog.getMetric(item.getId());
List<Item> items = item.getRelateItems();
List<DimensionResp> dimensionResps = Lists.newArrayList();
if (!org.springframework.util.CollectionUtils.isEmpty(items)) {
List<Long> ids = items.stream().map(Item::getId).collect(Collectors.toList());
DimensionFilter dimensionFilter = new DimensionFilter();
dimensionFilter.setIds(ids);
dimensionResps = catalog.getDimensions(dimensionFilter);
}
QueryStructReq queryStructReq = buildQueryStructReq(dimensionResps, metricResp, dateConf, limit);
QueryResultWithSchemaResp queryResultWithSchemaResp =
queryByStruct(queryStructReq, User.getAppUser(appId));
SingleItemQueryResult apiQuerySingleResult = new SingleItemQueryResult();
apiQuerySingleResult.setItem(item);
apiQuerySingleResult.setResult(queryResultWithSchemaResp);
return apiQuerySingleResult;
}
    /**
     * Resolves the calling application from the appId request header; the
     * header is expected to be present (validated by {@code ApiHeaderCheckAspect}).
     */
    private AppDetailResp getAppDetailResp(HttpServletRequest request) {
        int appId = Integer.parseInt(request.getHeader(ApiHeaderCheckAspect.APPID));
        return appService.getApp(appId);
    }
private QueryStructReq buildQueryStructReq(List<DimensionResp> dimensionResps,
MetricResp metricResp, DateConf dateConf, Long limit) {
Set<Long> modelIds = dimensionResps.stream().map(DimensionResp::getModelId).collect(Collectors.toSet());
modelIds.add(metricResp.getModelId());
QueryStructReq queryStructReq = new QueryStructReq();
queryStructReq.setGroups(dimensionResps.stream()
.map(DimensionResp::getBizName).collect(Collectors.toList()));
queryStructReq.getGroups().add(0, getTimeDimension(dateConf));
Aggregator aggregator = new Aggregator();
aggregator.setColumn(metricResp.getBizName());
queryStructReq.setAggregators(Lists.newArrayList(aggregator));
queryStructReq.setDateInfo(dateConf);
queryStructReq.setModelIds(modelIds);
queryStructReq.setLimit(limit);
return queryStructReq;
}
private String getTimeDimension(DateConf dateConf) {
if (Constants.MONTH.equals(dateConf.getPeriod())) {
return TimeDimensionEnum.MONTH.getName();
} else if (Constants.WEEK.equals(dateConf.getPeriod())) {
return TimeDimensionEnum.WEEK.getName();
} else {
return TimeDimensionEnum.DAY.getName();
}
}
private void authCheck(AppDetailResp appDetailResp, List<Long> ids, ApiItemType type) {
Set<Long> idsInApp = appDetailResp.getConfig().getAllItems().stream()
.filter(item -> type.equals(item.getType())).map(Item::getId).collect(Collectors.toSet());
if (!idsInApp.containsAll(ids)) {
throw new InvalidArgumentException("查询范围超过应用申请范围, 请检查");
}
}
private ExplainResp getExplainResp(QueryStatement queryStatement) {
String sql = "";
if (Objects.nonNull(queryStatement)) {
sql = queryStatement.getSql();
}
return ExplainResp.builder().sql(sql).build();
}
    /**
     * Converts a raw {@code MetricReq} into a physical {@link QueryStatement},
     * using an empty struct request as the planning context.
     */
    public QueryStatement parseMetricReq(MetricReq metricReq) throws Exception {
        QueryStructReq queryStructCmd = new QueryStructReq();
        return headlessQueryEngine.physicalSql(queryStructCmd, metricReq);
    }
private boolean isCache(QueryStructReq queryStructCmd) {
if (!cacheEnable) {
return false;
}
if (queryStructCmd.getCacheInfo() != null) {
return queryStructCmd.getCacheInfo().getCache();
}
return false;
}
private boolean isCache(QueryMultiStructReq queryStructCmd) {
if (!cacheEnable) {
return false;
}
if (!CollectionUtils.isEmpty(queryStructCmd.getQueryStructReqs())
&& queryStructCmd.getQueryStructReqs().get(0).getCacheInfo() != null) {
return queryStructCmd.getQueryStructReqs().get(0).getCacheInfo().getCache();
}
return false;
}
private QueryResultWithSchemaResp queryByCache(String key, Object queryCmd) {
Object resultObject = cacheUtils.get(key);
if (Objects.nonNull(resultObject)) {
log.info("queryByStructWithCache, key:{}, queryCmd:{}", key, queryCmd.toString());
statUtils.updateResultCacheKey(key);
return (QueryResultWithSchemaResp) resultObject;
}
return null;
}
private QueryS2SQLReq generateDimValueQuerySql(QueryDimValueReq queryDimValueReq) {
QueryS2SQLReq queryS2SQLReq = new QueryS2SQLReq();
List<ModelResp> modelResps = catalog.getModelList(Lists.newArrayList(queryDimValueReq.getModelId()));
DimensionResp dimensionResp = catalog.getDimension(queryDimValueReq.getDimensionBizName(),
queryDimValueReq.getModelId());
ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s", dimensionResp.getName(), modelResp.getName());
List<Dim> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s where %s >= '%s' and %s <= '%s'", sql, TimeDimensionEnum.DAY.getName(),
queryDimValueReq.getDateInfo().getStartDate(), TimeDimensionEnum.DAY.getName(),
queryDimValueReq.getDateInfo().getEndDate());
}
queryS2SQLReq.setModelIds(Sets.newHashSet(queryDimValueReq.getModelId()));
queryS2SQLReq.setSql(sql);
return queryS2SQLReq;
}
private String getKeyByModelIds(List<Long> modelIds) {
return String.join(",", modelIds.stream()
.map(Object::toString).collect(Collectors.toList()));
}
}

View File

@@ -0,0 +1,136 @@
package com.tencent.supersonic.headless.core.service.impl;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import com.tencent.supersonic.headless.common.core.response.ItemUseResp;
import com.tencent.supersonic.headless.common.server.request.ModelSchemaFilterReq;
import com.tencent.supersonic.headless.common.server.request.PageDimensionReq;
import com.tencent.supersonic.headless.common.server.request.PageMetricReq;
import com.tencent.supersonic.headless.common.server.response.DimSchemaResp;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.DomainResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.MetricSchemaResp;
import com.tencent.supersonic.headless.common.server.response.ModelResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.core.service.QueryService;
import com.tencent.supersonic.headless.core.service.SchemaService;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.DomainService;
import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.ModelService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.AT_SYMBOL;
/**
 * Facade over the headless server services, exposing model schemas (enriched
 * with usage counts), dimension/metric pagination and domain/model listings.
 */
@Slf4j
@Service
public class SchemaServiceImpl implements SchemaService {

    private final QueryService queryService;
    private final ModelService modelService;
    private final DimensionService dimensionService;
    private final MetricService metricService;
    private final DomainService domainService;

    public SchemaServiceImpl(QueryService queryService,
                             ModelService modelService,
                             DimensionService dimensionService,
                             MetricService metricService,
                             DomainService domainService) {
        this.queryService = queryService;
        this.modelService = modelService;
        this.dimensionService = dimensionService;
        this.metricService = metricService;
        this.domainService = domainService;
    }

    /**
     * Fetches model schemas and enriches each dimension/metric with its usage count.
     *
     * @param filter model-id filter
     * @param user   requesting user
     * @return model schemas with {@code useCnt} populated where stats exist
     */
    @Override
    public List<ModelSchemaResp> fetchModelSchema(ModelSchemaFilterReq filter, User user) {
        List<ModelSchemaResp> domainSchemaDescList = modelService.fetchModelSchema(filter);
        ItemUseReq itemUseCommend = new ItemUseReq();
        itemUseCommend.setModelIds(filter.getModelIds());
        List<ItemUseResp> statInfos = queryService.getStatInfo(itemUseCommend);
        log.debug("statInfos:{}", statInfos);
        fillCnt(domainSchemaDescList, statInfos);
        return domainSchemaDescList;
    }

    /** Indexes stat info by "type@@bizName" and applies it to every model schema. */
    private void fillCnt(List<ModelSchemaResp> domainSchemaDescList, List<ItemUseResp> statInfos) {
        Map<String, ItemUseResp> typeIdAndStatPair = statInfos.stream()
                .collect(Collectors.toMap(
                        itemUseInfo -> itemUseInfo.getType() + AT_SYMBOL + AT_SYMBOL + itemUseInfo.getBizName(),
                        itemUseInfo -> itemUseInfo,
                        // Keep the first entry on duplicate keys.
                        (item1, item2) -> item1));
        log.debug("typeIdAndStatPair:{}", typeIdAndStatPair);
        for (ModelSchemaResp domainSchemaDesc : domainSchemaDescList) {
            fillDimCnt(domainSchemaDesc, typeIdAndStatPair);
            fillMetricCnt(domainSchemaDesc, typeIdAndStatPair);
        }
    }

    /** Copies the usage count onto each metric that has a matching stat entry. */
    private void fillMetricCnt(ModelSchemaResp domainSchemaDesc, Map<String, ItemUseResp> typeIdAndStatPair) {
        List<MetricSchemaResp> metrics = domainSchemaDesc.getMetrics();
        // Simplified: the previous code checked emptiness twice on the same list.
        if (CollectionUtils.isEmpty(metrics)) {
            return;
        }
        metrics.forEach(metric -> {
            String key = TypeEnums.METRIC.getName() + AT_SYMBOL + AT_SYMBOL + metric.getBizName();
            ItemUseResp stat = typeIdAndStatPair.get(key);
            if (stat != null) {
                metric.setUseCnt(stat.getUseCnt());
            }
        });
        domainSchemaDesc.setMetrics(metrics);
    }

    /** Copies the usage count onto each dimension that has a matching stat entry. */
    private void fillDimCnt(ModelSchemaResp domainSchemaDesc, Map<String, ItemUseResp> typeIdAndStatPair) {
        List<DimSchemaResp> dimensions = domainSchemaDesc.getDimensions();
        // Simplified: the previous code checked emptiness twice on the same list.
        if (CollectionUtils.isEmpty(dimensions)) {
            return;
        }
        dimensions.forEach(dim -> {
            String key = TypeEnums.DIMENSION.getName() + AT_SYMBOL + AT_SYMBOL + dim.getBizName();
            ItemUseResp stat = typeIdAndStatPair.get(key);
            if (stat != null) {
                dim.setUseCnt(stat.getUseCnt());
            }
        });
        domainSchemaDesc.setDimensions(dimensions);
    }

    /** Pages dimensions; delegates to the dimension service. */
    @Override
    public PageInfo<DimensionResp> queryDimension(PageDimensionReq pageDimensionCmd, User user) {
        return dimensionService.queryDimension(pageDimensionCmd);
    }

    /** Pages metrics; delegates to the metric service. */
    @Override
    public PageInfo<MetricResp> queryMetric(PageMetricReq pageMetricReq, User user) {
        return metricService.queryMetric(pageMetricReq, user);
    }

    /** Domains the user administers. */
    @Override
    public List<DomainResp> getDomainList(User user) {
        return domainService.getDomainListWithAdminAuth(user);
    }

    /** Models under a domain, filtered by the user's auth type. */
    @Override
    public List<ModelResp> getModelList(User user, AuthType authTypeEnum, Long domainId) {
        return modelService.getModelListWithAuth(user, domainId, authTypeEnum);
    }
}

View File

@@ -0,0 +1,91 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.core.parser.HeadlessConverter;
import com.tencent.supersonic.headless.core.parser.SqlParser;
import com.tencent.supersonic.headless.core.parser.calcite.CalciteSqlParser;
import com.tencent.supersonic.headless.core.parser.convert.DefaultDimValueConverter;
import com.tencent.supersonic.headless.core.parser.convert.ZipperModelConverter;
import com.tencent.supersonic.headless.core.executor.JdbcExecutor;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.optimizer.DetailQuery;
import com.tencent.supersonic.headless.core.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.parser.convert.CalculateAggConverter;
import com.tencent.supersonic.headless.core.parser.convert.MetricCheckConverter;
import com.tencent.supersonic.headless.core.parser.convert.ParserDefaultConverter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Static registry for the headless query pipeline components: semantic
 * converters, query executors, query optimizers and the SQL parser.
 * Components are resolved from the Spring context via {@link ContextUtils};
 * the static initializer eagerly populates the registries and the getters
 * re-initialize lazily if a registry is found empty.
 */
public class ComponentFactory {

    private static List<HeadlessConverter> headlessConverters = new ArrayList<>();
    private static List<QueryExecutor> queryExecutors = new ArrayList<>();
    private static Map<String, QueryOptimizer> queryOptimizers = new HashMap<>();
    private static SqlParser sqlParser;

    static {
        initSemanticConverter();
        initQueryExecutors();
        initQueryOptimizer();
    }

    /** Converters applied to a query, in registration order. */
    public static List<HeadlessConverter> getSemanticConverters() {
        if (headlessConverters.isEmpty()) {
            initSemanticConverter();
        }
        return headlessConverters;
    }

    /** Executors capable of running a planned statement. */
    public static List<QueryExecutor> getQueryExecutors() {
        if (queryExecutors.isEmpty()) {
            initQueryExecutors();
        }
        return queryExecutors;
    }

    /** Registered optimizers as a fresh list snapshot. */
    public static List<QueryOptimizer> getQueryOptimizers() {
        if (queryOptimizers.isEmpty()) {
            initQueryOptimizer();
        }
        return new ArrayList<>(queryOptimizers.values());
    }

    /** Lazily resolves the Calcite-based SQL parser unless one was injected. */
    public static SqlParser getSqlParser() {
        if (sqlParser == null) {
            // Consistency: resolve through the shared getBean helper.
            sqlParser = getBean("CalciteSqlParser", CalciteSqlParser.class);
        }
        return sqlParser;
    }

    /** Allows tests/extensions to replace the SQL parser. */
    public static void setSqlParser(SqlParser parser) {
        sqlParser = parser;
    }

    /** Registers (or replaces) an optimizer under the given name. */
    public static void addQueryOptimizer(String name, QueryOptimizer queryOptimizer) {
        queryOptimizers.put(name, queryOptimizer);
    }

    /** Resolves a named bean from the Spring context. */
    public static <T> T getBean(String name, Class<T> tClass) {
        return ContextUtils.getContext().getBean(name, tClass);
    }

    private static void initQueryOptimizer() {
        queryOptimizers.put("DetailQuery", getBean("DetailQuery", DetailQuery.class));
    }

    private static void initSemanticConverter() {
        // Order matters: converters run in the order they are added here.
        headlessConverters.add(getBean("MetricCheckConverter", MetricCheckConverter.class));
        headlessConverters.add(getBean("DefaultDimValueConverter", DefaultDimValueConverter.class));
        headlessConverters.add(getBean("CalculateAggConverter", CalculateAggConverter.class));
        headlessConverters.add(getBean("ParserDefaultConverter", ParserDefaultConverter.class));
        headlessConverters.add(getBean("ZipperModelConverter", ZipperModelConverter.class));
    }

    private static void initQueryExecutors() {
        // Consistency: was a direct ContextUtils call; now uses the shared helper.
        queryExecutors.add(getBean("JdbcExecutor", JdbcExecutor.class));
    }
}

View File

@@ -0,0 +1,71 @@
package com.tencent.supersonic.headless.core.utils;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Transforms row-oriented query results into a "wide" layout where each date
 * becomes a column holding the metric value for that date.
 */
public class DataTransformUtils {

    /**
     * Pivots the original rows: for each row the time-dimension value becomes a
     * column name whose value is the metric; rows sharing the same group key
     * are merged; dates with no data are filled with "".
     *
     * @param originalData rows as returned by the query engine
     * @param metric       name of the metric column to pivot
     * @param groups       group-by column names kept as-is on each row
     * @param dateConf     supplies the full date list and the period granularity
     * @return merged, date-pivoted rows
     */
    public static List<Map<String, Object>> transform(List<Map<String, Object>> originalData, String metric,
            List<String> groups, DateConf dateConf) {
        List<String> dateList = dateConf.getDateList();
        // Hoisted out of the loop: depends only on dateConf.
        String timeDimension = getTimeDimension(dateConf);
        List<Map<String, Object>> transposedData = new ArrayList<>();
        for (Map<String, Object> originalRow : originalData) {
            Map<String, Object> transposedRow = new HashMap<>();
            // Iterate entries instead of keySet+get to avoid double lookups.
            for (Map.Entry<String, Object> entry : originalRow.entrySet()) {
                if (groups.contains(entry.getKey())) {
                    transposedRow.put(entry.getKey(), entry.getValue());
                }
            }
            // The date value itself becomes the column name for the metric value.
            transposedRow.put(String.valueOf(originalRow.get(timeDimension)),
                    originalRow.get(metric));
            transposedData.add(transposedRow);
        }
        Map<String, List<Map<String, Object>>> dataMerge = transposedData.stream()
                .collect(Collectors.groupingBy(row -> getRowKey(row, groups)));
        List<Map<String, Object>> resultData = Lists.newArrayList();
        for (List<Map<String, Object>> data : dataMerge.values()) {
            Map<String, Object> rowData = new HashMap<>();
            for (Map<String, Object> row : data) {
                rowData.putAll(row);
            }
            // Pad missing dates so every row covers the full requested range.
            // containsKey (not putIfAbsent) so an existing null value is kept.
            for (String date : dateList) {
                if (!rowData.containsKey(date)) {
                    rowData.put(date, "");
                }
            }
            resultData.add(rowData);
        }
        return resultData;
    }

    /**
     * Builds the merge key from the group column values (time columns excluded),
     * joined with '_'. Iteration follows the row map's key order.
     */
    private static String getRowKey(Map<String, Object> originalRow, List<String> groups) {
        // Hoisted out of the loop: the time-column name list is loop-invariant.
        List<String> timeNames = TimeDimensionEnum.getNameList();
        List<Object> values = Lists.newArrayList();
        for (Map.Entry<String, Object> entry : originalRow.entrySet()) {
            if (groups.contains(entry.getKey()) && !timeNames.contains(entry.getKey())) {
                values.add(entry.getValue());
            }
        }
        return StringUtils.join(values, "_");
    }

    /**
     * Maps the query period to the matching system time-dimension column,
     * defaulting to day granularity. Null-safe with respect to the period value.
     */
    private static String getTimeDimension(DateConf dateConf) {
        if (Constants.MONTH.equals(dateConf.getPeriod())) {
            return TimeDimensionEnum.MONTH.getName();
        } else if (Constants.WEEK.equals(dateConf.getPeriod())) {
            return TimeDimensionEnum.WEEK.getName();
        } else {
            return TimeDimensionEnum.DAY.getName();
        }
    }
}

View File

@@ -0,0 +1,493 @@
package com.tencent.supersonic.headless.core.utils;
import com.google.common.collect.Lists;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.DateConf.DateMode;
import com.tencent.supersonic.common.pojo.ItemDateResp;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.common.util.SqlFilterUtils;
import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.common.util.jsqlparser.FieldExpression;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserAddHelper;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserRemoveHelper;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserSelectHelper;
import com.tencent.supersonic.headless.common.core.request.ParseSqlReq;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.server.pojo.ItemDateFilter;
import com.tencent.supersonic.headless.common.server.pojo.SchemaItem;
import com.tencent.supersonic.headless.common.server.request.ModelSchemaFilterReq;
import com.tencent.supersonic.headless.common.server.response.DimSchemaResp;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.MetricSchemaResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.core.service.SchemaService;
import com.tencent.supersonic.headless.server.pojo.EngineTypeEnum;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.logging.log4j.util.Strings;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.DAY;
import static com.tencent.supersonic.common.pojo.Constants.DAY_FORMAT;
import static com.tencent.supersonic.common.pojo.Constants.MONTH;
import static com.tencent.supersonic.common.pojo.Constants.UNDERLINE;
import static com.tencent.supersonic.common.pojo.Constants.WEEK;
/**
 * Helpers for turning struct / S2SQL query requests into WHERE clauses and for
 * resolving the dimension/metric names a request touches.
 */
@Slf4j
@Component
public class QueryStructUtils {
    // System time columns that may appear in queries but are not user dimensions.
    public static Set<String> internalTimeCols = new HashSet<>(
            Arrays.asList("dayno", "sys_imp_date", "sys_imp_week", "sys_imp_month"));
    // All internal columns: the system variable column plus the time columns above.
    public static Set<String> internalCols;
    static {
        internalCols = new HashSet<>(Arrays.asList("plat_sys_var"));
        internalCols.addAll(internalTimeCols);
    }
    private final DateModeUtils dateModeUtils;
    private final SqlFilterUtils sqlFilterUtils;
    private final Catalog catalog;
    // Suffix for the auto-generated internal count metric name.
    @Value("${internal.metric.cnt.suffix:internal_cnt}")
    private String internalMetricNameSuffix;
    // Engine versions at/below which SQL WITH (CTE) is considered unsupported.
    @Value("${metricParser.agg.mysql.lowVersion:5.7}")
    private String mysqlLowVersion;
    @Value("${metricParser.agg.ck.lowVersion:20.4}")
    private String ckLowVersion;
    @Autowired
    private SchemaService schemaService;
    // NOTE(review): variablePrefix is not referenced in the visible code — confirm usage.
    private String variablePrefix = "'${";
    public QueryStructUtils(
            DateModeUtils dateModeUtils,
            SqlFilterUtils sqlFilterUtils, Catalog catalog) {
        this.dateModeUtils = dateModeUtils;
        this.sqlFilterUtils = sqlFilterUtils;
        this.catalog = catalog;
    }
private List<Long> getDimensionIds(QueryStructReq queryStructCmd) {
List<Long> dimensionIds = new ArrayList<>();
MetaFilter metaFilter = new MetaFilter(queryStructCmd.getModelIds());
List<DimensionResp> dimensions = catalog.getDimensions(metaFilter);
Map<String, List<DimensionResp>> pair = dimensions.stream()
.collect(Collectors.groupingBy(DimensionResp::getBizName));
for (String group : queryStructCmd.getGroups()) {
if (pair.containsKey(group)) {
dimensionIds.add(pair.get(group).get(0).getId());
}
}
List<String> filtersCols = sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter());
for (String col : filtersCols) {
if (pair.containsKey(col)) {
dimensionIds.add(pair.get(col).get(0).getId());
}
}
return dimensionIds;
}
private List<Long> getMetricIds(QueryStructReq queryStructCmd) {
List<Long> metricIds = new ArrayList<>();
MetaFilter metaFilter = new MetaFilter(queryStructCmd.getModelIds());
List<MetricResp> metrics = catalog.getMetrics(metaFilter);
Map<String, List<MetricResp>> pair = metrics.stream().collect(Collectors.groupingBy(SchemaItem::getBizName));
for (Aggregator agg : queryStructCmd.getAggregators()) {
if (pair.containsKey(agg.getColumn())) {
metricIds.add(pair.get(agg.getColumn()).get(0).getId());
}
}
List<String> filtersCols = sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter());
for (String col : filtersCols) {
if (pair.containsKey(col)) {
metricIds.add(pair.get(col).get(0).getId());
}
}
return metricIds;
}
    /**
     * Builds the date-range WHERE fragment for a struct query based on the
     * request's DateConf and the recorded data availability of the referenced
     * dimensions/metrics (looked up via the catalog).
     *
     * @return a WHERE fragment, or "" when no date constraint applies
     */
    public String getDateWhereClause(QueryStructReq queryStructCmd) {
        DateConf dateInfo = queryStructCmd.getDateInfo();
        if (Objects.isNull(dateInfo) || Objects.isNull(dateInfo.getDateMode())) {
            return "";
        }
        // RECENT with a non-positive unit means "no constraint".
        if (dateInfo.getDateMode().equals(DateMode.RECENT)) {
            if (dateInfo.getUnit() <= 0) {
                return "";
            }
        }
        List<Long> dimensionIds = getDimensionIds(queryStructCmd);
        List<Long> metricIds = getMetricIds(queryStructCmd);
        ItemDateResp dateDate = catalog.getItemDate(
                new ItemDateFilter(dimensionIds, TypeEnums.DIMENSION.getName()),
                new ItemDateFilter(metricIds, TypeEnums.METRIC.getName()));
        // No recorded availability window: fall back to mode-specific defaults.
        // NOTE(review): dateDate may be null inside this branch yet is still passed
        // to the dateModeUtils helpers — confirm they tolerate null.
        if (Objects.isNull(dateDate)
                || Strings.isEmpty(dateDate.getStartDate())
                && Strings.isEmpty(dateDate.getEndDate())) {
            if (dateInfo.getDateMode().equals(DateMode.LIST)) {
                return dateModeUtils.listDateStr(dateDate, dateInfo);
            }
            if (dateInfo.getDateMode().equals(DateMode.BETWEEN)) {
                return dateModeUtils.betweenDateStr(dateDate, dateInfo);
            }
            if (dateModeUtils.hasAvailableDataMode(dateInfo)) {
                return dateModeUtils.hasDataModeStr(dateDate, dateInfo);
            }
            return dateModeUtils.defaultRecentDateInfo(queryStructCmd.getDateInfo());
        }
        log.info("dateDate:{}", dateDate);
        return dateModeUtils.getDateWhereStr(dateInfo, dateDate);
    }
    /**
     * Builds the complete WHERE clause for a struct query by merging the
     * filter-derived fragment with the date-derived fragment.
     */
    public String generateWhere(QueryStructReq queryStructCmd) {
        String whereClauseFromFilter = sqlFilterUtils.getWhereClause(queryStructCmd.getOriginalFilter());
        String whereFromDate = getDateWhereClause(queryStructCmd);
        return mergeDateWhereClause(queryStructCmd, whereClauseFromFilter, whereFromDate);
    }
    /**
     * Combines the filter-derived WHERE fragment with the date-derived one:
     * both present -> "date AND (filter)"; only one present -> that one;
     * date fragment null (not just empty) with empty filter -> default
     * recent-date constraint.
     */
    public String mergeDateWhereClause(QueryStructReq queryStructCmd, String whereClauseFromFilter,
            String whereFromDate) {
        if (Strings.isNotEmpty(whereFromDate) && Strings.isNotEmpty(whereClauseFromFilter)) {
            return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
        } else if (Strings.isEmpty(whereFromDate) && Strings.isNotEmpty(whereClauseFromFilter)) {
            return whereClauseFromFilter;
        } else if (Strings.isNotEmpty(whereFromDate) && Strings.isEmpty(whereClauseFromFilter)) {
            return whereFromDate;
        } else if (Objects.isNull(whereFromDate) && Strings.isEmpty(whereClauseFromFilter)) {
            // NOTE(review): getDateWhereClause returns "" rather than null, so this
            // branch only triggers when a caller passes null explicitly — confirm intent.
            log.info("the current date information is empty, enter the date initialization logic");
            return dateModeUtils.defaultRecentDateInfo(queryStructCmd.getDateInfo());
        }
        return whereClauseFromFilter;
    }
public Set<String> getResNameEn(QueryStructReq queryStructCmd) {
Set<String> resNameEnSet = new HashSet<>();
queryStructCmd.getAggregators().stream().forEach(agg -> resNameEnSet.add(agg.getColumn()));
resNameEnSet.addAll(queryStructCmd.getGroups());
queryStructCmd.getOrders().stream().forEach(order -> resNameEnSet.add(order.getColumn()));
sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter()).stream().forEach(col -> resNameEnSet.add(col));
return resNameEnSet;
}
public Set<String> getResName(QueryS2SQLReq queryS2SQLReq) {
Set<String> resNameSet = SqlParserSelectHelper.getAllFields(queryS2SQLReq.getSql())
.stream().collect(Collectors.toSet());
return resNameSet;
}
    /**
     * Result column names of a struct query, excluding internal/system columns.
     */
    public Set<String> getResNameEnExceptInternalCol(QueryStructReq queryStructCmd) {
        Set<String> resNameEnSet = getResNameEn(queryStructCmd);
        return resNameEnSet.stream().filter(res -> !internalCols.contains(res)).collect(Collectors.toSet());
    }
public Set<String> getResNameEnExceptInternalCol(QueryS2SQLReq queryS2SQLReq, User user) {
Set<String> resNameSet = getResName(queryS2SQLReq);
Set<String> resNameEnSet = new HashSet<>();
ModelSchemaFilterReq filter = new ModelSchemaFilterReq();
List<Long> modelIds = Lists.newArrayList(queryS2SQLReq.getModelIds());
filter.setModelIds(modelIds);
List<ModelSchemaResp> modelSchemaRespList = schemaService.fetchModelSchema(filter, user);
if (!CollectionUtils.isEmpty(modelSchemaRespList)) {
List<MetricSchemaResp> metrics = modelSchemaRespList.get(0).getMetrics();
List<DimSchemaResp> dimensions = modelSchemaRespList.get(0).getDimensions();
metrics.stream().forEach(o -> {
if (resNameSet.contains(o.getName())) {
resNameEnSet.add(o.getBizName());
}
});
dimensions.stream().forEach(o -> {
if (resNameSet.contains(o.getName())) {
resNameEnSet.add(o.getBizName());
}
});
}
return resNameEnSet.stream().filter(res -> !internalCols.contains(res)).collect(Collectors.toSet());
}
public Set<String> getFilterResNameEn(QueryStructReq queryStructCmd) {
Set<String> resNameEnSet = new HashSet<>();
sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter()).stream().forEach(col -> resNameEnSet.add(col));
return resNameEnSet;
}
    /**
     * Filter column names of a struct query, excluding internal/system columns.
     */
    public Set<String> getFilterResNameEnExceptInternalCol(QueryStructReq queryStructCmd) {
        Set<String> resNameEnSet = getFilterResNameEn(queryStructCmd);
        return resNameEnSet.stream().filter(res -> !internalCols.contains(res)).collect(Collectors.toSet());
    }
public Set<String> getFilterResNameEnExceptInternalCol(QueryS2SQLReq queryS2SQLReq) {
String sql = queryS2SQLReq.getSql();
Set<String> resNameEnSet = SqlParserSelectHelper.getWhereFields(sql).stream().collect(Collectors.toSet());
return resNameEnSet.stream().filter(res -> !internalCols.contains(res)).collect(Collectors.toSet());
}
    /**
     * Derives the internal count-metric name as "<modelBizName>_<suffix>",
     * where the prefix comes from the model's first dimension (empty when the
     * model has no dimensions).
     *
     * NOTE(review): the {@code groups} parameter is unused — kept for call-site
     * compatibility; confirm whether it can be dropped.
     */
    public String generateInternalMetricName(Long modelId, List<String> groups) {
        String internalMetricNamePrefix = "";
        MetaFilter modelFilter = new MetaFilter(Collections.singletonList(modelId));
        List<DimensionResp> dimensions = catalog.getDimensions(modelFilter);
        if (!CollectionUtils.isEmpty(dimensions)) {
            internalMetricNamePrefix = dimensions.get(0).getModelBizName();
        }
        return internalMetricNamePrefix + UNDERLINE + internalMetricNameSuffix;
    }
    /**
     * Whether the target engine version supports SQL WITH (common table expressions).
     *
     * NOTE(review): the MySQL check uses startsWith(mysqlLowVersion), so only
     * versions literally beginning with "5.7" are treated as unsupported, while
     * the ClickHouse check uses a proper version comparison — a MySQL 5.6
     * version string would pass here. Confirm whether that is intended.
     */
    public boolean isSupportWith(EngineTypeEnum engineTypeEnum, String version) {
        if (engineTypeEnum.equals(EngineTypeEnum.MYSQL) && Objects.nonNull(version) && version.startsWith(
                mysqlLowVersion)) {
            return false;
        }
        if (engineTypeEnum.equals(EngineTypeEnum.CLICKHOUSE) && Objects.nonNull(version)
                && StringUtil.compareVersion(version,
                ckLowVersion) < 0) {
            return false;
        }
        return true;
    }
    /**
     * Builds the WHERE clause for a zipper (slowly-changing) model query by
     * merging the filter fragment with the zipper-specific date fragment.
     */
    public String generateZipperWhere(QueryStructReq queryStructCmd) {
        String whereClauseFromFilter = sqlFilterUtils.getWhereClause(queryStructCmd.getOriginalFilter());
        String whereFromDate = getZipperDateWhereClause(queryStructCmd);
        return mergeDateWhereClause(queryStructCmd, whereClauseFromFilter, whereFromDate);
    }
    /**
     * Rewrites a parsed SQL for a zipper model: strips the plain date columns
     * from the WHERE clause and the table dimension lists, substitutes the
     * zipper begin/end date columns, and re-adds a time filter built from the
     * query's time ranges. Returns the original SQL unchanged when the request
     * lacks SQL, tables or date info.
     */
    public String generateZipperWhere(QueryStructReq queryStructCmd, ParseSqlReq parseSqlReq) {
        if (Objects.nonNull(parseSqlReq.getSql()) && !CollectionUtils.isEmpty(parseSqlReq.getTables())
                && Objects.nonNull(queryStructCmd.getDateInfo())) {
            // Drop the plain date-column conditions; they are replaced below.
            String sql = SqlParserRemoveHelper.removeWhere(parseSqlReq.getSql(),
                    dateModeUtils.getDateCol());
            parseSqlReq.getTables().stream().forEach(t -> {
                if (Objects.nonNull(t)) {
                    List<String> dimensions = new ArrayList<>();
                    if (!CollectionUtils.isEmpty(t.getDimensions())) {
                        // Keep every dimension except the plain date columns.
                        dimensions.addAll(t.getDimensions().stream()
                                .filter(d -> !dateModeUtils.getDateCol().contains(d.toLowerCase())).collect(
                                        Collectors.toList()));
                    }
                    // Zipper models carry an effective-from / effective-to pair.
                    dimensions.add(dateModeUtils.getDateColBegin(queryStructCmd.getDateInfo()));
                    dimensions.add(dateModeUtils.getDateColEnd(queryStructCmd.getDateInfo()));
                    t.setDimensions(dimensions);
                }
            });
            return SqlParserAddHelper.addWhere(sql,
                    SqlParserSelectHelper.getTimeFilter(getTimeRanges(queryStructCmd),
                            dateModeUtils.getDateColBegin(queryStructCmd.getDateInfo()),
                            dateModeUtils.getDateColEnd(queryStructCmd.getDateInfo())));
        }
        return parseSqlReq.getSql();
    }
/**
 * Renders the requested time ranges as an OR-joined, parenthesized WHERE
 * fragment, e.g. "(d >= a and d <= b or d >= c and d <= e)".
 *
 * @param queryStructCmd request carrying the date info
 * @return the fragment, or "" when no range yields a non-empty clause
 */
public String getZipperDateWhereClause(QueryStructReq queryStructCmd) {
    List<ImmutablePair<String, String>> ranges = getTimeRanges(queryStructCmd);
    if (CollectionUtils.isEmpty(ranges)) {
        return "";
    }
    List<String> clauses = ranges.stream()
            .map(range -> dateModeUtils.getDateWhereStr(queryStructCmd.getDateInfo(), range))
            .filter(clause -> !clause.isEmpty())
            .collect(Collectors.toList());
    if (clauses.isEmpty()) {
        return "";
    }
    return clauses.stream().collect(Collectors.joining(" or ", "(", ")"));
}
/**
 * Resolves the (dateColumn, startDate, endDate) triple for a struct query
 * according to its DateConf mode:
 * AVAILABLE/BETWEEN use the explicit start/end, LIST uses min/max of the date
 * list, and RECENT derives the window from item availability dates (falling
 * back to "yesterday minus unit days" when none are recorded).
 *
 * @param queryStructCmd the struct query request
 * @return the resolved triple, or ("", "", "") when no date info can be determined
 */
public Triple<String, String, String> getBeginEndTime(QueryStructReq queryStructCmd) {
    if (Objects.isNull(queryStructCmd.getDateInfo())) {
        return Triple.of("", "", "");
    }
    DateConf dateConf = queryStructCmd.getDateInfo();
    String dateInfo = dateModeUtils.getSysDateCol(dateConf);
    if (dateInfo.isEmpty()) {
        return Triple.of("", "", "");
    }
    switch (dateConf.getDateMode()) {
        case AVAILABLE:
        case BETWEEN:
            return Triple.of(dateInfo, dateConf.getStartDate(), dateConf.getEndDate());
        case LIST:
            return Triple.of(dateInfo, Collections.min(dateConf.getDateList()),
                    Collections.max(dateConf.getDateList()));
        case RECENT:
            ItemDateResp dateDate = getItemDateResp(queryStructCmd);
            // Default window: the last `unit` days ending yesterday.
            LocalDate dateMax = LocalDate.now().minusDays(1);
            LocalDate dateMin = dateMax.minusDays(dateConf.getUnit() - 1);
            if (Objects.isNull(dateDate)) {
                return Triple.of(dateInfo, dateMin.format(DateTimeFormatter.ofPattern(DAY_FORMAT)),
                        dateMax.format(DateTimeFormatter.ofPattern(DAY_FORMAT)));
            }
            // Availability info exists: clamp the window by reporting period.
            switch (dateConf.getPeriod()) {
                case DAY:
                    ImmutablePair<String, String> dayInfo = dateModeUtils.recentDay(dateDate, dateConf);
                    return Triple.of(dateInfo, dayInfo.left, dayInfo.right);
                case WEEK:
                    ImmutablePair<String, String> weekInfo = dateModeUtils.recentWeek(dateDate, dateConf);
                    return Triple.of(dateInfo, weekInfo.left, weekInfo.right);
                case MONTH:
                    // Months can yield several ranges; take the overall min begin and max end.
                    List<ImmutablePair<String, String>> rets = dateModeUtils.recentMonth(dateDate, dateConf);
                    Optional<String> minBegins = rets.stream().map(i -> i.left).sorted().findFirst();
                    Optional<String> maxBegins = rets.stream().map(i -> i.right).sorted(Comparator.reverseOrder())
                            .findFirst();
                    if (minBegins.isPresent() && maxBegins.isPresent()) {
                        return Triple.of(dateInfo, minBegins.get(), maxBegins.get());
                    }
                    break;
                default:
                    break;
            }
            break;
        default:
            break;
    }
    return Triple.of("", "", "");
}
/**
 * Expands the request's DateConf into concrete [start, end] date pairs:
 * AVAILABLE/BETWEEN yield one pair, LIST yields one degenerate pair per date,
 * and RECENT derives pairs from item availability dates (falling back to the
 * last `unit` days ending yesterday when no availability info exists).
 *
 * @param queryStructCmd the struct query request (may be null)
 * @return the resolved ranges; empty when no date info is available
 */
public List<ImmutablePair<String, String>> getTimeRanges(QueryStructReq queryStructCmd) {
    List<ImmutablePair<String, String>> ret = new ArrayList<>();
    if (Objects.isNull(queryStructCmd) || Objects.isNull(queryStructCmd.getDateInfo())) {
        return ret;
    }
    DateConf dateConf = queryStructCmd.getDateInfo();
    switch (dateConf.getDateMode()) {
        case AVAILABLE:
        case BETWEEN:
            ret.add(ImmutablePair.of(dateConf.getStartDate(), dateConf.getEndDate()));
            break;
        case LIST:
            // Each listed date becomes its own single-day range.
            for (String date : dateConf.getDateList()) {
                ret.add(ImmutablePair.of(date, date));
            }
            break;
        case RECENT:
            ItemDateResp dateDate = getItemDateResp(queryStructCmd);
            // Default window: the last `unit` days ending yesterday.
            LocalDate dateMax = LocalDate.now().minusDays(1);
            LocalDate dateMin = dateMax.minusDays(dateConf.getUnit() - 1);
            if (Objects.isNull(dateDate)) {
                ret.add(ImmutablePair.of(dateMin.format(DateTimeFormatter.ofPattern(DAY_FORMAT)),
                        dateMax.format(DateTimeFormatter.ofPattern(DAY_FORMAT))));
                break;
            }
            // Availability info exists: derive ranges per reporting period.
            switch (dateConf.getPeriod()) {
                case DAY:
                    ret.add(dateModeUtils.recentDay(dateDate, dateConf));
                    break;
                case WEEK:
                    ret.add(dateModeUtils.recentWeek(dateDate, dateConf));
                    break;
                case MONTH:
                    List<ImmutablePair<String, String>> rets = dateModeUtils.recentMonth(dateDate, dateConf);
                    ret.addAll(rets);
                    break;
                default:
                    break;
            }
            break;
        default:
            break;
    }
    return ret;
}
/**
 * Looks up the availability (partition-date) information for the dimensions
 * and metrics referenced by the query.
 */
private ItemDateResp getItemDateResp(QueryStructReq queryStructCmd) {
    ItemDateFilter dimensionFilter =
            new ItemDateFilter(getDimensionIds(queryStructCmd), TypeEnums.DIMENSION.getName());
    ItemDateFilter metricFilter =
            new ItemDateFilter(getMetricIds(queryStructCmd), TypeEnums.METRIC.getName());
    return catalog.getItemDate(dimensionFilter, metricFilter);
}
/**
 * Derives a DateConf from the date predicates found in a SQL statement:
 * equality predicates on a recognized date column collect into a LIST config,
 * range predicates produce a BETWEEN config.
 *
 * @param sql the SQL to inspect
 * @return the derived DateConf, or null when no usable date predicate exists
 */
public DateConf getDateConfBySql(String sql) {
    List<FieldExpression> fieldExpressions = SqlParserSelectHelper.getFilterExpression(sql);
    if (!CollectionUtils.isEmpty(fieldExpressions)) {
        Set<String> dateList = new HashSet<>();
        String startDate = "";
        String endDate = "";
        String period = "";
        for (FieldExpression f : fieldExpressions) {
            // Only predicates on recognized internal date columns are considered.
            if (Objects.isNull(f.getFieldName()) || !internalCols.contains(f.getFieldName().toLowerCase())) {
                continue;
            }
            if (Objects.isNull(f.getFieldValue()) || !dateModeUtils.isDateStr(f.getFieldValue().toString())) {
                continue;
            }
            period = dateModeUtils.getPeriodByCol(f.getFieldName().toLowerCase());
            if ("".equals(period)) {
                continue;
            }
            if ("=".equals(f.getOperator())) {
                dateList.add(f.getFieldValue().toString());
            } else if ("<".equals(f.getOperator()) || "<=".equals(f.getOperator())) {
                // NOTE(review): "<"/"<=" feeds startDate (kept as the minimum) while
                // ">"/">=" feeds endDate (maximum). Relative to plain "field < value"
                // semantics this mapping looks inverted - confirm against the operator
                // normalization performed by SqlParserSelectHelper/FieldExpression.
                if (startDate.isEmpty() || startDate.compareTo(f.getFieldValue().toString()) > 0) {
                    startDate = f.getFieldValue().toString();
                }
            } else if (">".equals(f.getOperator()) || ">=".equals(f.getOperator())) {
                if (endDate.isEmpty() || endDate.compareTo(f.getFieldValue().toString()) < 0) {
                    endDate = f.getFieldValue().toString();
                }
            }
        }
        if (!"".equals(period)) {
            DateConf dateConf = new DateConf();
            dateConf.setPeriod(period);
            // Equality predicates win: an explicit list of dates.
            if (!CollectionUtils.isEmpty(dateList)) {
                dateConf.setDateList(new ArrayList<>(dateList));
                dateConf.setDateMode(DateMode.LIST);
                return dateConf;
            }
            // Both bounds present: a closed range.
            if (!"".equals(startDate) && !"".equals(endDate)) {
                dateConf.setStartDate(startDate);
                dateConf.setEndDate(endDate);
                dateConf.setDateMode(DateMode.BETWEEN);
                return dateConf;
            }
        }
    }
    return null;
}
/** Returns the recognized date column names, delegated from {@code dateModeUtils}. */
public List<String> getDateCol() {
    return dateModeUtils.getDateCol();
}
/** Returns the configured variable prefix. */
public String getVariablePrefix() {
    return variablePrefix;
}
}

View File

@@ -0,0 +1,255 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.cache.CacheUtils;
import com.tencent.supersonic.headless.common.core.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.common.server.enums.SemanticType;
import com.tencent.supersonic.headless.common.server.response.DimensionResp;
import com.tencent.supersonic.headless.common.server.response.MetricResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.Catalog;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.PostConstruct;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.JOIN_UNDERLINE;
import static com.tencent.supersonic.common.pojo.Constants.UNIONALL;
@Slf4j
@Component
public class QueryUtils {

    // Extracts a field name wrapped in back-quotes inside parentheses, e.g. "sum(`pv`)" -> "pv".
    // Compiled once instead of on every column (the previous code recompiled per call).
    private static final Pattern QUOTED_FIELD_PATTERN = Pattern.compile("\\(`(.*?)`\\)");
    // Fallback extraction without back-quotes, e.g. "sum(pv)" -> "pv".
    private static final Pattern PLAIN_FIELD_PATTERN = Pattern.compile("\\((.*?)\\)");
    private final Set<Pattern> patterns = new HashSet<>();
    @Value("${query.cache.enable:true}")
    private Boolean cacheEnable;
    @Value("${query.optimizer.enable:true}")
    private Boolean optimizeEnable;
    private final CacheUtils cacheUtils;
    private final StatUtils statUtils;
    private final Catalog catalog;

    public QueryUtils(
            CacheUtils cacheUtils, StatUtils statUtils, Catalog catalog) {
        this.cacheUtils = cacheUtils;
        this.statUtils = statUtils;
        this.catalog = catalog;
    }

    /**
     * Pre-compiles aggregate-function matchers (MAX/MIN/SUM/AVG) at startup.
     * NOTE(review): the resulting {@code patterns} set is populated here but not read
     * anywhere else in this class - confirm whether it is still needed.
     */
    @PostConstruct
    public void fillPattern() {
        Set<String> aggFunctions = new HashSet<>(Arrays.asList("MAX", "MIN", "SUM", "AVG"));
        String patternStr = "\\s*(%s\\((.*)\\)) AS";
        for (String agg : aggFunctions) {
            patterns.add(Pattern.compile(String.format(patternStr, agg)));
        }
    }

    /**
     * Replaces technical column names (bizName) in the query result with display names,
     * and fills show types and data formats from the models' metric/dimension metadata.
     *
     * @param queryResultWithColumns result whose columns are decorated in place
     * @param modelIds               models whose metrics/dimensions supply the name mapping
     */
    public void fillItemNameInfo(QueryResultWithSchemaResp queryResultWithColumns, List<Long> modelIds) {
        MetaFilter metaFilter = new MetaFilter(modelIds);
        List<MetricResp> metricDescList = catalog.getMetrics(metaFilter);
        List<DimensionResp> dimensionDescList = catalog.getDimensions(metaFilter);
        // On duplicate bizName, keep the first metric encountered.
        Map<String, MetricResp> metricRespMap =
                metricDescList.stream().collect(Collectors.toMap(MetricResp::getBizName, a -> a, (k1, k2) -> k1));
        Map<String, String> namePair = new HashMap<>();
        Map<String, String> nameTypePair = new HashMap<>();
        addSysTimeDimension(namePair, nameTypePair);
        metricDescList.forEach(metricDesc -> {
            namePair.put(metricDesc.getBizName(), metricDesc.getName());
            nameTypePair.put(metricDesc.getBizName(), SemanticType.NUMBER.name());
        });
        dimensionDescList.forEach(dimensionDesc -> {
            namePair.put(dimensionDesc.getBizName(), dimensionDesc.getName());
            nameTypePair.put(dimensionDesc.getBizName(), dimensionDesc.getSemanticType());
        });
        List<QueryColumn> columns = queryResultWithColumns.getColumns();
        columns.forEach(column -> {
            String nameEn = column.getNameEn().toLowerCase();
            // Joined columns look like "model__field": keep only the field part.
            if (nameEn.contains(JOIN_UNDERLINE)) {
                nameEn = nameEn.split(JOIN_UNDERLINE)[1];
            }
            if (namePair.containsKey(nameEn)) {
                column.setName(namePair.get(nameEn));
            } else {
                // The column may be an expression such as sum(`pv`): extract the inner field name.
                String nameEnByRegex = getNameEnByRegex(nameEn, QUOTED_FIELD_PATTERN);
                if (StringUtils.isEmpty(nameEnByRegex)) {
                    nameEnByRegex = getNameEnByRegex(nameEn, PLAIN_FIELD_PATTERN);
                }
                if (StringUtils.isNotEmpty(nameEnByRegex) && StringUtils.isNotEmpty(namePair.get(nameEnByRegex))) {
                    String fieldName = namePair.get(nameEnByRegex);
                    // Literal replacement: the extracted name or the display name may contain
                    // regex/replacement metacharacters, so String.replace is used rather than
                    // the previous replaceAll (which interpreted both as regex/template).
                    column.setName(nameEn.replace(nameEnByRegex, fieldName));
                }
            }
            if (nameTypePair.containsKey(nameEn)) {
                column.setShowType(nameTypePair.get(nameEn));
            }
            if (!nameTypePair.containsKey(nameEn) && isNumberType(column.getType())) {
                column.setShowType(SemanticType.NUMBER.name());
            }
            if (metricRespMap.containsKey(nameEn)) {
                column.setDataFormatType(metricRespMap.get(nameEn).getDataFormatType());
                column.setDataFormat(metricRespMap.get(nameEn).getDataFormat());
            }
            // Anything still untyped defaults to a categorical column.
            if (StringUtils.isEmpty(column.getShowType())) {
                column.setShowType(SemanticType.CATEGORY.name());
            }
        });
    }

    /**
     * Fills display names and show types for a multi-struct (union) query result.
     * Union sub-queries alias dimensions as name1..nameN and metrics as value1..valueN;
     * this maps the aliases back to readable names.
     *
     * @param queryResultWithColumns result whose columns are decorated in place
     * @param queryMultiStructCmd    the multi-struct request carrying the aggregators
     */
    public void fillItemNameInfo(QueryResultWithSchemaResp queryResultWithColumns,
            QueryMultiStructReq queryMultiStructCmd) {
        List<Aggregator> aggregators = queryMultiStructCmd.getQueryStructReqs().stream()
                .flatMap(queryStructCmd -> queryStructCmd.getAggregators().stream())
                .collect(Collectors.toList());
        log.info("multi agg merge:{}", aggregators);
        Map<String, String> metricNameFromAgg = getMetricNameFromAgg(aggregators);
        log.info("metricNameFromAgg:{}", metricNameFromAgg);
        Map<String, String> namePair = new HashMap<>();
        Map<String, String> nameTypePair = new HashMap<>();
        addSysTimeDimension(namePair, nameTypePair);
        namePair.putAll(metricNameFromAgg);
        List<QueryColumn> columns = queryResultWithColumns.getColumns();
        columns.forEach(column -> {
            String nameEn = column.getNameEn().toLowerCase();
            if (nameEn.contains(JOIN_UNDERLINE)) {
                nameEn = nameEn.split(JOIN_UNDERLINE)[1];
            }
            if (namePair.containsKey(nameEn)) {
                column.setName(namePair.get(nameEn));
            } else {
                // Fallback display names for unmapped union aliases.
                if (nameEn.startsWith("name")) {
                    column.setName("名称");
                } else if (nameEn.startsWith("value")) {
                    column.setName("指标值");
                }
            }
            if (nameTypePair.containsKey(nameEn)) {
                column.setShowType(nameTypePair.get(nameEn));
            } else {
                if (nameEn.startsWith("name")) {
                    column.setShowType("CATEGORY");
                } else if (nameEn.startsWith("value")) {
                    column.setShowType("NUMBER");
                }
            }
        });
    }

    /** Returns the first capture group of {@code pattern} in {@code nameEn}, or null when absent. */
    private String getNameEnByRegex(String nameEn, Pattern pattern) {
        Matcher m = pattern.matcher(nameEn);
        if (m.find()) {
            return m.group(1);
        }
        return null;
    }

    /** Heuristically decides whether a backend column type string denotes a numeric type. */
    private boolean isNumberType(String type) {
        if (StringUtils.isBlank(type)) {
            return false;
        }
        if (type.equalsIgnoreCase("int") || type.equalsIgnoreCase("bigint")
                || type.equalsIgnoreCase("float") || type.equalsIgnoreCase("double")) {
            return true;
        }
        // Covers e.g. ClickHouse-style UInt*/Int* variants.
        if (type.toLowerCase().startsWith("uint") || type.toLowerCase().startsWith("int")) {
            return true;
        }
        return false;
    }

    /** Maps union aliases value1..valueN to the aggregators' display names (nameCh). */
    private Map<String, String> getMetricNameFromAgg(List<Aggregator> aggregators) {
        Map<String, String> map = new HashMap<>();
        if (CollectionUtils.isEmpty(aggregators)) {
            return map;
        }
        for (int i = 0; i < aggregators.size(); i++) {
            Aggregator aggregator = aggregators.get(i);
            if (StringUtils.isBlank(aggregator.getNameCh())) {
                continue;
            }
            map.put("value" + (i + 1), aggregator.getNameCh());
        }
        return map;
    }

    /** Registers the system time dimensions as "date"-named DATE columns. */
    private static void addSysTimeDimension(Map<String, String> namePair, Map<String, String> nameTypePair) {
        for (TimeDimensionEnum timeDimensionEnum : TimeDimensionEnum.values()) {
            namePair.put(timeDimensionEnum.getName(), "date");
            nameTypePair.put(timeDimensionEnum.getName(), "DATE");
        }
    }

    /**
     * Validates that parsing produced both a SQL text and a source id.
     *
     * @throws RuntimeException when either is missing, carrying the parser's error message
     */
    public void checkSqlParse(QueryStatement sqlParser) {
        if (com.google.common.base.Strings.isNullOrEmpty(sqlParser.getSql())
                || com.google.common.base.Strings.isNullOrEmpty(sqlParser.getSourceId())) {
            throw new RuntimeException("parse Exception: " + sqlParser.getErrMsg());
        }
    }

    /**
     * Unions the per-struct SQL statements into one statement of the form
     * "select ... from (sub) sub_sql_i UNION ALL ...", using the first statement's source id.
     */
    public QueryStatement sqlParserUnion(QueryMultiStructReq queryMultiStructCmd, List<QueryStatement> sqlParsers) {
        QueryStatement sqlParser = new QueryStatement();
        StringBuilder unionSqlBuilder = new StringBuilder();
        for (int i = 0; i < sqlParsers.size(); i++) {
            String selectStr = SqlGenerateUtils.getUnionSelect(queryMultiStructCmd.getQueryStructReqs().get(i));
            unionSqlBuilder.append(String.format("select %s from ( %s ) sub_sql_%s",
                    selectStr,
                    sqlParsers.get(i).getSql(), i));
            unionSqlBuilder.append(UNIONALL);
        }
        // Drop the trailing UNION ALL separator appended after the last sub-query.
        String unionSql = unionSqlBuilder.substring(0, unionSqlBuilder.length() - Constants.UNIONALL.length());
        sqlParser.setSql(unionSql);
        sqlParser.setSourceId(sqlParsers.get(0).getSourceId());
        log.info("union sql parser:{}", sqlParser);
        return sqlParser;
    }

    /**
     * Asynchronously caches a non-empty query result under {@code key} (when caching is
     * enabled) and records the cache key in the current request's statistics.
     */
    public void cacheResultLogic(String key, QueryResultWithSchemaResp queryResultWithColumns) {
        if (cacheEnable && Objects.nonNull(queryResultWithColumns) && !CollectionUtils.isEmpty(
                queryResultWithColumns.getResultList())) {
            QueryResultWithSchemaResp finalQueryResultWithColumns = queryResultWithColumns;
            CompletableFuture.supplyAsync(() -> cacheUtils.put(key, finalQueryResultWithColumns))
                    .exceptionally(exception -> {
                        log.warn("exception:", exception);
                        return null;
                    });
            statUtils.updateResultCacheKey(key);
            log.info("add record to cache, key:{}", key);
        }
    }

    /** Whether the query optimizer is enabled (query.optimizer.enable, default true). */
    public Boolean enableOptimize() {
        return optimizeEnable;
    }
}

View File

@@ -0,0 +1,85 @@
package com.tencent.supersonic.headless.core.utils;
import static com.tencent.supersonic.common.pojo.Constants.JOIN_UNDERLINE;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Component
@Slf4j
public class SqlGenerateUtils {

    /**
     * Builds the select list used when union-ing several struct queries:
     * non-time group-by columns are aliased name1..nameN and aggregated columns
     * value1..valueN so all unioned sub-queries share one column layout.
     *
     * @param queryStructCmd the struct query supplying groups and aggregators
     * @return comma-separated select expressions, or "" when there is nothing to select
     */
    public static String getUnionSelect(QueryStructReq queryStructCmd) {
        StringBuilder sb = new StringBuilder();
        int locate = 0;
        for (String group : queryStructCmd.getGroups()) {
            // Joined columns look like "model__field": keep only the field part.
            if (group.contains(JOIN_UNDERLINE)) {
                group = group.split(JOIN_UNDERLINE)[1];
            }
            if (!TimeDimensionEnum.getNameList().contains(group)) {
                locate++;
                sb.append(group).append(" as ").append("name").append(locate).append(",");
            } else {
                // Time dimensions keep their original name (no aliasing).
                sb.append(group).append(",");
            }
        }
        locate = 0;
        for (Aggregator agg : queryStructCmd.getAggregators()) {
            locate++;
            sb.append(agg.getColumn()).append(" as ").append("value").append(locate).append(",");
        }
        if (sb.length() == 0) {
            // No groups and no aggregators: avoid substring(0, -1) throwing.
            return "";
        }
        // Drop the trailing comma.
        String selectSql = sb.substring(0, sb.length() - 1);
        log.info("union select sql {}", selectSql);
        return selectSql;
    }

    /** Returns a " limit N" clause when the request sets a positive limit, else "". */
    public String getLimit(QueryStructReq queryStructCmd) {
        if (queryStructCmd.getLimit() > 0) {
            return " limit " + queryStructCmd.getLimit();
        }
        return "";
    }

    /** Builds the select list: group-by columns (if any) followed by the aggregate expressions. */
    public String getSelect(QueryStructReq queryStructCmd) {
        String aggStr = queryStructCmd.getAggregators().stream().map(this::getSelectField)
                .collect(Collectors.joining(","));
        return CollectionUtils.isEmpty(queryStructCmd.getGroups()) ? aggStr
                : String.join(",", queryStructCmd.getGroups()) + "," + aggStr;
    }

    /**
     * Renders one aggregator as a select expression aliased to its column name.
     * COUNT_DISTINCT renders as count(distinct col); extra args are passed through,
     * quoting non-numeric arguments other than the column itself.
     */
    public String getSelectField(final Aggregator agg) {
        if (AggOperatorEnum.COUNT_DISTINCT.equals(agg.getFunc())) {
            return "count(distinct " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
        }
        if (CollectionUtils.isEmpty(agg.getArgs())) {
            return agg.getFunc() + "( " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
        }
        return agg.getFunc() + "( " + agg.getArgs().stream().map(arg ->
                arg.equals(agg.getColumn()) ? arg : (StringUtils.isNumeric(arg) ? arg : ("'" + arg + "'"))
        ).collect(Collectors.joining(",")) + " ) AS " + agg.getColumn() + " ";
    }

    /** Returns a "group by ..." clause over the request's groups, or "" when there are none. */
    public String getGroupBy(QueryStructReq queryStructCmd) {
        if (CollectionUtils.isEmpty(queryStructCmd.getGroups())) {
            return "";
        }
        return "group by " + String.join(",", queryStructCmd.getGroups());
    }

    /** Returns an "order by ..." clause over the request's orders, or "" when there are none. */
    public String getOrderBy(QueryStructReq queryStructCmd) {
        if (CollectionUtils.isEmpty(queryStructCmd.getOrders())) {
            return "";
        }
        return "order by " + queryStructCmd.getOrders().stream()
                .map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
                .collect(Collectors.joining(","));
    }
}

View File

@@ -0,0 +1,186 @@
package com.tencent.supersonic.headless.core.utils;
import com.alibaba.ttl.TransmittableThreadLocal;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
import com.tencent.supersonic.common.util.SqlFilterUtils;
import com.tencent.supersonic.common.util.jsqlparser.SqlParserSelectHelper;
import com.tencent.supersonic.headless.common.core.request.ItemUseReq;
import com.tencent.supersonic.headless.common.core.request.QueryS2SQLReq;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.common.core.response.ItemUseResp;
import com.tencent.supersonic.headless.common.core.enums.QueryOptMode;
import com.tencent.supersonic.headless.common.core.enums.QueryTypeBack;
import com.tencent.supersonic.headless.common.server.enums.QueryType;
import com.tencent.supersonic.headless.common.server.pojo.QueryStat;
import com.tencent.supersonic.headless.common.server.pojo.SchemaItem;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.core.persistence.repository.StatRepository;
import com.tencent.supersonic.headless.server.service.ModelService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.logging.log4j.util.Strings;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
@Component
@Slf4j
public class StatUtils {
private static final TransmittableThreadLocal<QueryStat> STATS = new TransmittableThreadLocal<>();
private final StatRepository statRepository;
private final SqlFilterUtils sqlFilterUtils;
private final ModelService modelService;
private final ObjectMapper objectMapper = new ObjectMapper();
public StatUtils(StatRepository statRepository,
SqlFilterUtils sqlFilterUtils,
ModelService modelService) {
this.statRepository = statRepository;
this.sqlFilterUtils = sqlFilterUtils;
this.modelService = modelService;
}
public static QueryStat get() {
return STATS.get();
}
public static void set(QueryStat queryStatInfo) {
STATS.set(queryStatInfo);
}
public static void remove() {
STATS.remove();
}
public void statInfo2DbAsync(TaskStatusEnum state) {
QueryStat queryStatInfo = get();
queryStatInfo.setElapsedMs(System.currentTimeMillis() - queryStatInfo.getStartTime());
queryStatInfo.setQueryState(state.getStatus());
log.info("queryStatInfo: {}", queryStatInfo);
CompletableFuture.runAsync(() -> {
statRepository.createRecord(queryStatInfo);
}).exceptionally(exception -> {
log.warn("queryStatInfo, exception:", exception);
return null;
});
remove();
}
public Boolean updateResultCacheKey(String key) {
STATS.get().setResultCacheKey(key);
return true;
}
public void initStatInfo(QueryS2SQLReq queryS2SQLReq, User facadeUser) {
QueryStat queryStatInfo = new QueryStat();
List<String> allFields = SqlParserSelectHelper.getAllFields(queryS2SQLReq.getSql());
queryStatInfo.setModelId(queryS2SQLReq.getModelIds().get(0));
ModelSchemaResp modelSchemaResp = modelService.fetchSingleModelSchema(queryS2SQLReq.getModelIds().get(0));
List<String> dimensions = new ArrayList<>();
List<String> metrics = new ArrayList<>();
if (Objects.nonNull(modelSchemaResp)) {
dimensions = getFieldNames(allFields, modelSchemaResp.getDimensions());
metrics = getFieldNames(allFields, modelSchemaResp.getMetrics());
}
String userName = getUserName(facadeUser);
try {
queryStatInfo.setTraceId("")
.setModelId(queryS2SQLReq.getModelIds().get(0))
.setUser(userName)
.setQueryType(QueryType.SQL.getValue())
.setQueryTypeBack(QueryTypeBack.NORMAL.getState())
.setQuerySqlCmd(queryS2SQLReq.toString())
.setQuerySqlCmdMd5(DigestUtils.md5Hex(queryS2SQLReq.toString()))
.setStartTime(System.currentTimeMillis())
.setUseResultCache(true)
.setUseSqlCache(true)
.setMetrics(objectMapper.writeValueAsString(metrics))
.setDimensions(objectMapper.writeValueAsString(dimensions));
} catch (JsonProcessingException e) {
log.error("initStatInfo:{}", e);
}
StatUtils.set(queryStatInfo);
}
public void initStatInfo(QueryStructReq queryStructCmd, User facadeUser) {
QueryStat queryStatInfo = new QueryStat();
String traceId = "";
List<String> dimensions = queryStructCmd.getGroups();
List<String> metrics = new ArrayList<>();
queryStructCmd.getAggregators().stream().forEach(aggregator -> metrics.add(aggregator.getColumn()));
String user = getUserName(facadeUser);
try {
queryStatInfo.setTraceId(traceId)
.setModelId(1L)
.setUser(user)
.setQueryType(QueryType.STRUCT.getValue())
.setQueryTypeBack(QueryTypeBack.NORMAL.getState())
.setQueryStructCmd(queryStructCmd.toString())
.setQueryStructCmdMd5(DigestUtils.md5Hex(queryStructCmd.toString()))
.setStartTime(System.currentTimeMillis())
.setNativeQuery(queryStructCmd.getQueryType().isNativeAggQuery())
.setGroupByCols(objectMapper.writeValueAsString(queryStructCmd.getGroups()))
.setAggCols(objectMapper.writeValueAsString(queryStructCmd.getAggregators()))
.setOrderByCols(objectMapper.writeValueAsString(queryStructCmd.getOrders()))
.setFilterCols(objectMapper.writeValueAsString(
sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter())))
.setUseResultCache(true)
.setUseSqlCache(true)
.setMetrics(objectMapper.writeValueAsString(metrics))
.setDimensions(objectMapper.writeValueAsString(dimensions))
.setQueryOptMode(QueryOptMode.NONE.name());
} catch (JsonProcessingException e) {
e.printStackTrace();
}
StatUtils.set(queryStatInfo);
}
private List<String> getFieldNames(List<String> allFields, List<? extends SchemaItem> schemaItems) {
Set<String> fieldNames = schemaItems
.stream()
.map(dimSchemaResp -> dimSchemaResp.getBizName())
.collect(Collectors.toSet());
if (!CollectionUtils.isEmpty(fieldNames)) {
return allFields.stream().filter(fieldName -> fieldNames.contains(fieldName))
.collect(Collectors.toList());
}
return new ArrayList<>();
}
private String getUserName(User facadeUser) {
return (Objects.nonNull(facadeUser) && Strings.isNotEmpty(facadeUser.getName())) ? facadeUser.getName()
: "Admin";
}
public Boolean updateQueryOptMode(String mode) {
STATS.get().setQueryOptMode(mode);
return true;
}
public List<ItemUseResp> getStatInfo(ItemUseReq itemUseCommend) {
return statRepository.getStatInfo(itemUseCommend);
}
public List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseCommend) {
return statRepository.getQueryStatInfoWithoutCache(itemUseCommend);
}
}

View File

@@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<!-- MyBatis mapper for query-statistics rows stored in s2_query_stat_info. -->
<mapper namespace="com.tencent.supersonic.headless.core.persistence.mapper.StatMapper">
<!-- Column-to-property mapping between s2_query_stat_info and QueryStat. -->
<resultMap id="QueryStatDO"
type="com.tencent.supersonic.headless.common.server.pojo.QueryStat">
<id column="id" property="id"/>
<result column="trace_id" property="traceId"/>
<result column="model_id" property="modelId"/>
<result column="user" property="user"/>
<result column="created_at" property="createdAt"/>
<result column="query_type" property="queryType"/>
<result column="query_type_back" property="queryTypeBack"/>
<result column="query_sql_cmd" property="querySqlCmd"/>
<result column="sql_cmd_md5" property="querySqlCmdMd5"/>
<result column="query_struct_cmd" property="queryStructCmd"/>
<result column="struct_cmd_md5" property="queryStructCmdMd5"/>
<result column="sql" property="sql"/>
<result column="sql_md5" property="sqlMd5"/>
<result column="query_engine" property="queryEngine"/>
<result column="elapsed_ms" property="elapsedMs"/>
<result column="query_state" property="queryState"/>
<result column="native_query" property="nativeQuery"/>
<result column="start_date" property="startDate"/>
<result column="end_date" property="endDate"/>
<result column="dimensions" property="dimensions"/>
<result column="metrics" property="metrics"/>
<result column="select_cols" property="selectCols"/>
<result column="agg_cols" property="aggCols"/>
<result column="filter_cols" property="filterCols"/>
<result column="group_by_cols" property="groupByCols"/>
<result column="order_by_cols" property="orderByCols"/>
<result column="use_result_cache" property="useResultCache"/>
<result column="use_sql_cache" property="useSqlCache"/>
<result column="sql_cache_key" property="sqlCacheKey"/>
<result column="result_cache_key" property="resultCacheKey"/>
<result column="query_opt_mode" property="queryOptMode"/>
</resultMap>
<!-- Inserts one query-statistics record; `user` and `sql` are back-quoted as they
     are reserved words in MySQL. -->
<insert id="createRecord">
insert into s2_query_stat_info
(
trace_id, model_id, `user`, query_type, query_type_back, query_sql_cmd, sql_cmd_md5, query_struct_cmd, struct_cmd_md5, `sql`, sql_md5, query_engine,
elapsed_ms, query_state, native_query, start_date, end_date, dimensions, metrics, select_cols, agg_cols, filter_cols, group_by_cols,
order_by_cols, use_result_cache, use_sql_cache, sql_cache_key, result_cache_key, query_opt_mode
)
values
(
#{traceId}, #{modelId}, #{user}, #{queryType}, #{queryTypeBack}, #{querySqlCmd}, #{querySqlCmdMd5}, #{queryStructCmd}, #{queryStructCmdMd5}, #{sql}, #{sqlMd5}, #{queryEngine},
#{elapsedMs}, #{queryState}, #{nativeQuery}, #{startDate}, #{endDate}, #{dimensions}, #{metrics}, #{selectCols}, #{aggCols}, #{filterCols}, #{groupByCols},
#{orderByCols}, #{useResultCache}, #{useSqlCache}, #{sqlCacheKey}, #{resultCacheKey}, #{queryOptMode}
)
</insert>
<!-- Filters statistics by start time, a single model id or a model-id list, and
     a metric keyword (substring match on the serialized metrics column). -->
<select id="getStatInfo"
resultType="com.tencent.supersonic.headless.common.server.pojo.QueryStat">
select *
from s2_query_stat_info
<where>
<if test="startTime != null">
and start_time >= #{startTime}
</if>
<if test="modelId != null">
and model_id = #{modelId}
</if>
<if test="modelIds != null and modelIds.size() > 0">
and model_id in
<foreach item="id" collection="modelIds" open="(" separator="," close=")">
#{id}
</foreach>
</if>
<if test="metric != null">
and metrics like concat('%',#{metric},'%')
</if>
</where>
</select>
</mapper>

View File

@@ -0,0 +1,255 @@
package com.tencent.supersonic.headless.query.domain.calcite;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.common.core.response.SqlParserResp;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
import com.tencent.supersonic.headless.common.core.enums.AggOption;
import com.tencent.supersonic.headless.common.core.request.MetricReq;
import com.tencent.supersonic.headless.core.parser.calcite.HeadlessSchemaManager;
import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
@Slf4j
class HeadlessParserServiceTest {
private static Map<String, HeadlessSchema> headlessSchemaMap = new HashMap<>();
/**
 * Parses a MetricReq against the given headless schema into an executable SQL
 * statement. Failures are reported through {@code SqlParserResp#setErrMsg}
 * rather than thrown.
 *
 * @param headlessSchema schema to plan against (null yields an error response)
 * @param metricCommand  the metric query; must carry a non-empty root path
 * @param isAgg          whether the caller wants aggregated output (inverted when
 *                       building the AggOption - see AggOption.getAggregation)
 * @return a response holding either the generated SQL and source id, or an error message
 */
public static SqlParserResp parser(HeadlessSchema headlessSchema, MetricReq metricCommand, boolean isAgg) {
    SqlParserResp sqlParser = new SqlParserResp();
    if (metricCommand.getRootPath().isEmpty()) {
        sqlParser.setErrMsg("rootPath empty");
        return sqlParser;
    }
    try {
        if (headlessSchema == null) {
            sqlParser.setErrMsg("headlessSchema not found");
            return sqlParser;
        }
        AggPlanner aggBuilder = new AggPlanner(headlessSchema);
        QueryStatement queryStatement = new QueryStatement();
        queryStatement.setMetricReq(metricCommand);
        aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg));
        sqlParser.setSql(aggBuilder.getSql());
        sqlParser.setSourceId(aggBuilder.getSourceId());
    } catch (Exception e) {
        // Planner failures are captured on the response instead of propagating.
        sqlParser.setErrMsg(e.getMessage());
        log.error("parser error MetricCommand[{}] error [{}]", metricCommand, e);
    }
    return sqlParser;
}
public void test() throws Exception {
DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setName("s2_pv_uv_statis");
datasource.setSourceId(1L);
datasource.setSqlQuery("SELECT imp_date, user_name,page,1 as pv, user_name as uv FROM s2_pv_uv_statis");
MeasureYamlTpl measure = new MeasureYamlTpl();
measure.setAgg("sum");
measure.setName("s2_pv_uv_statis_pv");
measure.setExpr("pv");
List<MeasureYamlTpl> measures = new ArrayList<>();
measures.add(measure);
MeasureYamlTpl measure2 = new MeasureYamlTpl();
measure2.setAgg("count");
measure2.setName("s2_pv_uv_statis_internal_cnt");
measure2.setExpr("1");
measure2.setCreateMetric("true");
measures.add(measure2);
MeasureYamlTpl measure3 = new MeasureYamlTpl();
measure3.setAgg("count");
measure3.setName("s2_pv_uv_statis_uv");
measure3.setExpr("uv");
measure3.setCreateMetric("true");
measures.add(measure3);
datasource.setMeasures(measures);
DimensionYamlTpl dimension = new DimensionYamlTpl();
dimension.setName("imp_date");
dimension.setExpr("imp_date");
dimension.setType("time");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams.setIsPrimary("true");
dimensionTimeTypeParams.setTimeGranularity("day");
dimension.setTypeParams(dimensionTimeTypeParams);
List<DimensionYamlTpl> dimensions = new ArrayList<>();
dimensions.add(dimension);
DimensionYamlTpl dimension2 = new DimensionYamlTpl();
dimension2.setName("sys_imp_date");
dimension2.setExpr("imp_date");
dimension2.setType("time");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams2 = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams2.setIsPrimary("true");
dimensionTimeTypeParams2.setTimeGranularity("day");
dimension2.setTypeParams(dimensionTimeTypeParams2);
dimensions.add(dimension2);
DimensionYamlTpl dimension3 = new DimensionYamlTpl();
dimension3.setName("sys_imp_week");
dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
dimension3.setType("time");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams3.setIsPrimary("true");
dimensionTimeTypeParams3.setTimeGranularity("day");
dimension3.setTypeParams(dimensionTimeTypeParams3);
dimensions.add(dimension3);
datasource.setDimensions(dimensions);
List<IdentifyYamlTpl> identifies = new ArrayList<>();
IdentifyYamlTpl identify = new IdentifyYamlTpl();
identify.setName("user_name");
identify.setType("primary");
identifies.add(identify);
datasource.setIdentifiers(identifies);
HeadlessSchema headlessSchema = HeadlessSchema.newBuilder("s2").build();
HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getDatasource(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setExpr("page");
dimension1.setName("page");
dimension1.setType("categorical");
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
dimensionYamlTpls.add(dimension1);
HeadlessSchemaManager.update(headlessSchema, "s2_pv_uv_statis",
HeadlessSchemaManager.getDimensions(dimensionYamlTpls));
MetricYamlTpl metric1 = new MetricYamlTpl();
metric1.setName("pv");
metric1.setType("expr");
MetricTypeParamsYamlTpl metricTypeParams = new MetricTypeParamsYamlTpl();
List<MeasureYamlTpl> measures1 = new ArrayList<>();
MeasureYamlTpl measure1 = new MeasureYamlTpl();
measure1.setName("s2_pv_uv_statis_pv");
measures1.add(measure1);
metricTypeParams.setMeasures(measures1);
metricTypeParams.setExpr("s2_pv_uv_statis_pv");
metric1.setTypeParams(metricTypeParams);
List<MetricYamlTpl> metric = new ArrayList<>();
metric.add(metric1);
MetricYamlTpl metric2 = new MetricYamlTpl();
metric2.setName("uv");
metric2.setType("expr");
MetricTypeParamsYamlTpl metricTypeParams1 = new MetricTypeParamsYamlTpl();
List<MeasureYamlTpl> measures2 = new ArrayList<>();
MeasureYamlTpl measure4 = new MeasureYamlTpl();
measure4.setName("s2_pv_uv_statis_uv");
measures2.add(measure4);
metricTypeParams1.setMeasures(measures2);
metricTypeParams1.setExpr("s2_pv_uv_statis_uv");
metric2.setTypeParams(metricTypeParams1);
metric.add(metric2);
HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric));
MetricReq metricCommand = new MetricReq();
metricCommand.setRootPath("s2");
metricCommand.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date")));
metricCommand.setMetrics(new ArrayList<>(Arrays.asList("pv")));
metricCommand.setWhere("user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
metricCommand.setLimit(1000L);
List<ColumnOrder> orders = new ArrayList<>();
orders.add(ColumnOrder.buildDesc("sys_imp_date"));
metricCommand.setOrder(orders);
System.out.println(parser(headlessSchema, metricCommand, true));
addDepartment(headlessSchema);
MetricReq metricCommand2 = new MetricReq();
metricCommand2.setRootPath("s2");
metricCommand2.setDimensions(new ArrayList<>(
Arrays.asList("sys_imp_date", "user_name__department", "user_name", "user_name__page")));
metricCommand2.setMetrics(new ArrayList<>(Arrays.asList("pv")));
metricCommand2.setWhere(
"user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
metricCommand2.setLimit(1000L);
List<ColumnOrder> orders2 = new ArrayList<>();
orders2.add(ColumnOrder.buildDesc("sys_imp_date"));
metricCommand2.setOrder(orders2);
System.out.println(parser(headlessSchema, metricCommand2, true));
}
private static void addDepartment(HeadlessSchema headlessSchema) {
DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setName("user_department");
datasource.setSourceId(1L);
datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department");
MeasureYamlTpl measure = new MeasureYamlTpl();
measure.setAgg("count");
measure.setName("user_department_internal_cnt");
measure.setCreateMetric("true");
measure.setExpr("1");
List<MeasureYamlTpl> measures = new ArrayList<>();
measures.add(measure);
datasource.setMeasures(measures);
DimensionYamlTpl dimension = new DimensionYamlTpl();
dimension.setName("sys_imp_date");
dimension.setExpr("imp_date");
dimension.setType("time");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams.setIsPrimary("true");
dimensionTimeTypeParams.setTimeGranularity("day");
dimension.setTypeParams(dimensionTimeTypeParams);
List<DimensionYamlTpl> dimensions = new ArrayList<>();
dimensions.add(dimension);
DimensionYamlTpl dimension3 = new DimensionYamlTpl();
dimension3.setName("sys_imp_week");
dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
dimension3.setType("time");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams3.setIsPrimary("true");
dimensionTimeTypeParams3.setTimeGranularity("week");
dimension3.setTypeParams(dimensionTimeTypeParams3);
dimensions.add(dimension3);
datasource.setDimensions(dimensions);
List<IdentifyYamlTpl> identifies = new ArrayList<>();
IdentifyYamlTpl identify = new IdentifyYamlTpl();
identify.setName("user_name");
identify.setType("primary");
identifies.add(identify);
datasource.setIdentifiers(identifies);
headlessSchema.getDatasource().put("user_department", HeadlessSchemaManager.getDatasource(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setExpr("department");
dimension1.setName("department");
dimension1.setType("categorical");
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
dimensionYamlTpls.add(dimension1);
headlessSchema.getDimension()
.put("user_department", HeadlessSchemaManager.getDimensions(dimensionYamlTpls));
}
}

View File

@@ -0,0 +1,61 @@
package com.tencent.supersonic.headless.query.domain.calcite;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.DateConf.DateMode;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.headless.common.core.request.QueryStructReq;
import com.tencent.supersonic.headless.core.persistence.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.parser.QueryParser;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.core.utils.QueryUtils;
import lombok.extern.slf4j.Slf4j;
import java.util.Arrays;
@Slf4j
public class MaterializationQueryTest {
private final QueryParser queryParser;
private final QueryUtils queryUtils;
public MaterializationQueryTest(QueryParser queryParser,
QueryUtils queryUtils) {
this.queryParser = queryParser;
this.queryUtils = queryUtils;
}
public void test() {
QueryStructReq queryStructReq = new QueryStructReq();
queryStructReq.setModelId(1L);
Aggregator aggregator = new Aggregator();
aggregator.setFunc(AggOperatorEnum.UNKNOWN);
aggregator.setColumn("pv");
queryStructReq.setAggregators(Arrays.asList(aggregator));
queryStructReq.setGroups(Arrays.asList("department"));
DateConf dateConf = new DateConf();
dateConf.setDateMode(DateMode.LIST);
dateConf.setDateList(Arrays.asList("2023-08-01"));
queryStructReq.setDateInfo(dateConf);
try {
QueryStatement queryStatement = new QueryStatement();
queryStatement.setQueryStructReq(queryStructReq);
queryStatement.setIsS2SQL(false);
queryStatement = queryParser.logicSql(queryStatement);
queryUtils.checkSqlParse(queryStatement);
queryStatement.setModelIds(queryStructReq.getModelIds());
log.info("queryStatement:{}", queryStatement);
for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
queryOptimizer.rewrite(queryStructReq, queryStatement);
}
//queryParser.test(queryStructReq,metricReq);
log.info("queryStatement:{}", queryStatement);
} catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@@ -0,0 +1,124 @@
package com.tencent.supersonic.headless.query.service;
import com.alibaba.excel.util.FileUtils;
import com.google.common.collect.Lists;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.common.core.request.BatchDownloadReq;
import com.tencent.supersonic.headless.common.server.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.common.server.pojo.RelateDimension;
import com.tencent.supersonic.headless.common.server.response.DimSchemaResp;
import com.tencent.supersonic.headless.common.server.response.MetricSchemaResp;
import com.tencent.supersonic.headless.common.server.response.ModelSchemaResp;
import com.tencent.supersonic.headless.common.server.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.headless.core.service.QueryService;
import com.tencent.supersonic.headless.core.service.impl.DownloadServiceImpl;
import com.tencent.supersonic.headless.server.service.ModelService;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.io.File;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
class DownloadServiceImplTest {

    /**
     * Drives {@code batchDownload} end-to-end with mocked model and query services,
     * writing the Excel output to a temporary file that is cleaned up afterwards.
     */
    @Test
    void testBatchDownload() throws Exception {
        ModelService modelService = Mockito.mock(ModelService.class);
        QueryService queryService = Mockito.mock(QueryService.class);
        when(modelService.fetchModelSchema(any())).thenReturn(Lists.newArrayList(mockModelSchemaResp()));
        when(queryService.queryByStruct(any(), any())).thenReturn(mockQueryResult());
        DownloadServiceImpl downloadService = new DownloadServiceImpl(modelService, queryService);
        String fileName = String.format("%s_%s.xlsx", "supersonic", DateUtils.format(new Date(), DateUtils.FORMAT));
        File file = FileUtils.createTmpFile(fileName);
        try {
            downloadService.batchDownload(buildBatchDownloadReq(), User.getFakeUser(), file);
        } finally {
            // Fix: delete the generated temp file so repeated runs do not leak files.
            file.delete();
        }
    }

    /** Model schema with two metrics (pv, uv) and three dimensions. */
    private ModelSchemaResp mockModelSchemaResp() {
        ModelSchemaResp modelSchemaResp = new ModelSchemaResp();
        modelSchemaResp.setId(1L);
        List<MetricSchemaResp> metricResps = Lists.newArrayList();
        metricResps.add(mockMetricPv());
        metricResps.add(mockMetricUv());
        modelSchemaResp.setMetrics(metricResps);
        List<DimSchemaResp> dimSchemaResps = Lists.newArrayList();
        dimSchemaResps.add(mockDimension(1L, "user_name", "用户名"));
        dimSchemaResps.add(mockDimension(2L, "department", "部门"));
        dimSchemaResps.add(mockDimension(3L, "page", "页面"));
        modelSchemaResp.setDimensions(dimSchemaResps);
        return modelSchemaResp;
    }

    /** Builds a metric whose drill-down dimensions are given by id. */
    private MetricSchemaResp mockMetric(Long id, String bizName, String name, List<Long> drillDownloadDimensions) {
        MetricSchemaResp metricResp = new MetricSchemaResp();
        metricResp.setId(id);
        metricResp.setBizName(bizName);
        metricResp.setName(name);
        RelateDimension relateDimension = new RelateDimension();
        relateDimension.setDrillDownDimensions(drillDownloadDimensions.stream()
                .map(DrillDownDimension::new).collect(Collectors.toList()));
        metricResp.setRelateDimension(relateDimension);
        return metricResp;
    }

    private DimSchemaResp mockDimension(Long id, String bizName, String name) {
        DimSchemaResp dimSchemaResp = new DimSchemaResp();
        dimSchemaResp.setId(id);
        dimSchemaResp.setBizName(bizName);
        dimSchemaResp.setName(name);
        return dimSchemaResp;
    }

    private MetricSchemaResp mockMetricPv() {
        return mockMetric(1L, "pv", "访问次数", Lists.newArrayList(1L, 2L));
    }

    private MetricSchemaResp mockMetricUv() {
        return mockMetric(2L, "uv", "访问用户数", Lists.newArrayList(2L));
    }

    /** Download request for metric 1 over a fixed date range. */
    private BatchDownloadReq buildBatchDownloadReq() {
        BatchDownloadReq batchDownloadReq = new BatchDownloadReq();
        batchDownloadReq.setMetricIds(Lists.newArrayList(1L));
        batchDownloadReq.setDateInfo(mockDataConf());
        return batchDownloadReq;
    }

    private DateConf mockDataConf() {
        DateConf dateConf = new DateConf();
        dateConf.setStartDate("2023-10-11");
        dateConf.setEndDate("2023-10-15");
        dateConf.setDateMode(DateConf.DateMode.BETWEEN);
        return dateConf;
    }

    /** Five rows of canned query output keyed by date/user/department/pv. */
    private QueryResultWithSchemaResp mockQueryResult() {
        QueryResultWithSchemaResp queryResultWithSchemaResp = new QueryResultWithSchemaResp();
        List<Map<String, Object>> resultList = Lists.newArrayList();
        resultList.add(createMap("2023-10-11", "tom", "hr", "1"));
        resultList.add(createMap("2023-10-12", "alice", "sales", "2"));
        resultList.add(createMap("2023-10-13", "jack", "sales", "3"));
        resultList.add(createMap("2023-10-14", "luck", "market", "4"));
        resultList.add(createMap("2023-10-15", "tom", "hr", "5"));
        queryResultWithSchemaResp.setResultList(resultList);
        return queryResultWithSchemaResp;
    }

    private static Map<String, Object> createMap(String sysImpDate, String d1, String d2, String m1) {
        Map<String, Object> map = new HashMap<>();
        map.put("sys_imp_date", sysImpDate);
        map.put("user_name", d1);
        map.put("department", d2);
        map.put("pv", m1);
        return map;
    }
}

View File

@@ -0,0 +1,39 @@
package com.tencent.supersonic.headless.query.utils;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.headless.core.utils.DataTransformUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class DataTransformUtilsTest {

    /**
     * Five input rows with two group-by dimensions yield three distinct
     * (d1, d2) combinations after transformation.
     */
    @Test
    public void testTransform() {
        List<Map<String, Object>> rows = new ArrayList<>();
        rows.add(row("2023/10/11", "a", "b", "1"));
        rows.add(row("2023/10/12", "a", "c", "2"));
        rows.add(row("2023/10/13", "a", "b", "3"));
        rows.add(row("2023/10/14", "a", "c", "4"));
        rows.add(row("2023/10/15", "b", "b", "5"));
        List<Map<String, Object>> transformed = DataTransformUtils.transform(rows,
                "m1", Lists.newArrayList("d1", "d2"), new DateConf());
        Assertions.assertEquals(3, transformed.size());
    }

    /** Builds a single result row keyed by date, two dimensions and one metric. */
    private static Map<String, Object> row(String sysImpDate, String d1, String d2, String m1) {
        Map<String, Object> record = new HashMap<>();
        record.put("sys_imp_date", sysImpDate);
        record.put("d1", d1);
        record.put("d2", d2);
        record.put("m1", m1);
        return record;
    }
}