[improvement][Chat] Support agent permission management (#1923)

* [improvement][Chat] Support agent permission management #1143

* [improvement][chat]Iterate LLM prompts of parsing and correction.

* [improvement][headless-fe] Added null-check conditions to the data formatting function.

* [improvement][headless]Clean code logic of headless translator.

---------

Co-authored-by: lxwcodemonkey <jolunoluo@tencent.com>
Co-authored-by: tristanliu <tristanliu@tencent.com>
This commit is contained in:
Jun Zhang
2024-11-23 09:09:04 +08:00
committed by GitHub
parent 244052e806
commit cb183b7ac8
66 changed files with 1023 additions and 1233 deletions

View File

@@ -1,9 +1,11 @@
package com.tencent.supersonic.chat.server.agent; package com.tencent.supersonic.chat.server.agent;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import com.tencent.supersonic.chat.server.memory.MemoryReviewTask; import com.tencent.supersonic.chat.server.memory.MemoryReviewTask;
import com.tencent.supersonic.common.pojo.ChatApp; import com.tencent.supersonic.common.pojo.ChatApp;
import com.tencent.supersonic.common.pojo.RecordInfo; import com.tencent.supersonic.common.pojo.RecordInfo;
import com.tencent.supersonic.common.pojo.User;
import lombok.Data; import lombok.Data;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
@@ -12,6 +14,7 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Data @Data
@@ -33,6 +36,8 @@ public class Agent extends RecordInfo {
private String toolConfig; private String toolConfig;
private Map<String, ChatApp> chatAppConfig = Collections.emptyMap(); private Map<String, ChatApp> chatAppConfig = Collections.emptyMap();
private VisualConfig visualConfig; private VisualConfig visualConfig;
private List<String> admins = Lists.newArrayList();
private List<String> viewers = Lists.newArrayList();
public List<String> getTools(AgentToolType type) { public List<String> getTools(AgentToolType type) {
Map<String, Object> map = JSONObject.parseObject(toolConfig, Map.class); Map<String, Object> map = JSONObject.parseObject(toolConfig, Map.class);
@@ -105,4 +110,9 @@ public class Agent extends RecordInfo {
.filter(dataSetIds -> !CollectionUtils.isEmpty(dataSetIds)) .filter(dataSetIds -> !CollectionUtils.isEmpty(dataSetIds))
.flatMap(Collection::stream).collect(Collectors.toSet()); .flatMap(Collection::stream).collect(Collectors.toSet());
} }
public boolean contains(User user, Function<Agent, List<String>> list) {
return list.apply(this).contains(user.getName());
}
} }

View File

@@ -40,4 +40,8 @@ public class AgentDO {
private String chatModelConfig; private String chatModelConfig;
private String visualConfig; private String visualConfig;
private String admin;
private String viewer;
} }

View File

@@ -8,6 +8,7 @@ import com.tencent.supersonic.chat.server.agent.Agent;
import com.tencent.supersonic.chat.server.agent.AgentToolType; import com.tencent.supersonic.chat.server.agent.AgentToolType;
import com.tencent.supersonic.chat.server.service.AgentService; import com.tencent.supersonic.chat.server.service.AgentService;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PathVariable;
@@ -15,6 +16,7 @@ import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.RestController;
import java.util.List; import java.util.List;
@@ -48,8 +50,11 @@ public class AgentController {
} }
@RequestMapping("/getAgentList") @RequestMapping("/getAgentList")
public List<Agent> getAgentList() { public List<Agent> getAgentList(
return agentService.getAgents(); @RequestParam(value = "authType", required = false) AuthType authType,
HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) {
User user = UserHolder.findUser(httpServletRequest, httpServletResponse);
return agentService.getAgents(user, authType);
} }
@RequestMapping("/getToolTypes") @RequestMapping("/getToolTypes")

View File

@@ -2,10 +2,12 @@ package com.tencent.supersonic.chat.server.service;
import com.tencent.supersonic.chat.server.agent.Agent; import com.tencent.supersonic.chat.server.agent.Agent;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import java.util.List; import java.util.List;
public interface AgentService { public interface AgentService {
List<Agent> getAgents(User user, AuthType authType);
List<Agent> getAgents(); List<Agent> getAgents();

View File

@@ -14,6 +14,7 @@ import com.tencent.supersonic.chat.server.service.MemoryService;
import com.tencent.supersonic.common.config.ChatModel; import com.tencent.supersonic.common.config.ChatModel;
import com.tencent.supersonic.common.pojo.ChatApp; import com.tencent.supersonic.common.pojo.ChatApp;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.common.service.ChatModelService; import com.tencent.supersonic.common.service.ChatModelService;
import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.common.util.JsonUtil;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -43,6 +44,27 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
private ExecutorService executorService = Executors.newFixedThreadPool(1); private ExecutorService executorService = Executors.newFixedThreadPool(1);
@Override
public List<Agent> getAgents(User user, AuthType authType) {
return getAgentDOList().stream().map(this::convert)
.filter(agent -> filterByAuth(agent, user, authType)).collect(Collectors.toList());
}
private boolean filterByAuth(Agent agent, User user, AuthType authType) {
if (user.isSuperAdmin() || user.getName().equals(agent.getCreatedBy())) {
return true;
}
authType = authType == null ? AuthType.VIEWER : authType;
switch (authType) {
case ADMIN:
return agent.contains(user, Agent::getAdmins);
case VIEWER:
default:
return agent.contains(user, Agent::getAdmins)
|| agent.contains(user, Agent::getViewers);
}
}
@Override @Override
public List<Agent> getAgents() { public List<Agent> getAgents() {
return getAgentDOList().stream().map(this::convert).collect(Collectors.toList()); return getAgentDOList().stream().map(this::convert).collect(Collectors.toList());
@@ -135,6 +157,8 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
c.setChatModelConfig(chatModelService.getChatModel(c.getChatModelId()).getConfig()); c.setChatModelConfig(chatModelService.getChatModel(c.getChatModelId()).getConfig());
} }
}); });
agent.setAdmins(JsonUtil.toList(agentDO.getAdmin(), String.class));
agent.setViewers(JsonUtil.toList(agentDO.getViewer(), String.class));
return agent; return agent;
} }
@@ -145,6 +169,8 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
agentDO.setExamples(JsonUtil.toString(agent.getExamples())); agentDO.setExamples(JsonUtil.toString(agent.getExamples()));
agentDO.setChatModelConfig(JsonUtil.toString(agent.getChatAppConfig())); agentDO.setChatModelConfig(JsonUtil.toString(agent.getChatAppConfig()));
agentDO.setVisualConfig(JsonUtil.toString(agent.getVisualConfig())); agentDO.setVisualConfig(JsonUtil.toString(agent.getVisualConfig()));
agentDO.setAdmin(JsonUtil.toString(agent.getAdmins()));
agentDO.setViewer(JsonUtil.toString(agent.getViewers()));
if (agentDO.getStatus() == null) { if (agentDO.getStatus() == null) {
agentDO.setStatus(1); agentDO.setStatus(1);
} }

View File

@@ -1,5 +1,5 @@
package com.tencent.supersonic.common.pojo.enums; package com.tencent.supersonic.common.pojo.enums;
public enum AuthType { public enum AuthType {
VISIBLE, ADMIN VIEWER, ADMIN
} }

View File

@@ -7,14 +7,11 @@ import com.tencent.supersonic.headless.api.pojo.enums.SchemaType;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import static com.tencent.supersonic.common.pojo.Constants.UNDERLINE;
@Data @Data
@AllArgsConstructor @AllArgsConstructor
@NoArgsConstructor @NoArgsConstructor
@@ -32,13 +29,6 @@ public class SemanticSchemaResp {
private DatabaseResp databaseResp; private DatabaseResp databaseResp;
private QueryType queryType; private QueryType queryType;
public String getSchemaKey() {
if (dataSetId == null) {
return String.format("%s_%s", schemaType, StringUtils.join(modelIds, UNDERLINE));
}
return String.format("%s_%s", schemaType, dataSetId);
}
public MetricSchemaResp getMetric(String bizName) { public MetricSchemaResp getMetric(String bizName) {
return metrics.stream().filter(metric -> bizName.equalsIgnoreCase(metric.getBizName())) return metrics.stream().filter(metric -> bizName.equalsIgnoreCase(metric.getBizName()))
.findFirst().orElse(null); .findFirst().orElse(null);

View File

@@ -36,8 +36,8 @@ public class LLMSqlCorrector extends BaseSemanticCorrector {
+ "\n2.NO NEED to check date filters as the junior engineer seldom makes mistakes in this regard." + "\n2.NO NEED to check date filters as the junior engineer seldom makes mistakes in this regard."
+ "\n3.DO NOT miss the AGGREGATE operator of metrics, always add it as needed." + "\n3.DO NOT miss the AGGREGATE operator of metrics, always add it as needed."
+ "\n4.ALWAYS use `with` statement if nested aggregation is needed." + "\n4.ALWAYS use `with` statement if nested aggregation is needed."
+ "\n5.ALWAYS enclose alias created by `AS` command in underscores." + "\n5.ALWAYS enclose alias declared by `AS` command in underscores."
+ "\n6.ALWAYS translate alias created by `AS` command to the same language as the `#Question`." + "\n6.Alias created by `AS` command must be in the same language as the `Question`."
+ "\n#Question:{{question}} #InputSQL:{{sql}} #Response:"; + "\n#Question:{{question}} #InputSQL:{{sql}} #Response:";
public LLMSqlCorrector() { public LLMSqlCorrector() {

View File

@@ -36,15 +36,13 @@ public class OnePassSCSqlGenStrategy extends SqlGenStrategy {
+ "\n#Task: You will be provided with a natural language question asked by users," + "\n#Task: You will be provided with a natural language question asked by users,"
+ "please convert it to a SQL query so that relevant data could be returned " + "please convert it to a SQL query so that relevant data could be returned "
+ "by executing the SQL query against underlying database." + "\n#Rules:" + "by executing the SQL query against underlying database." + "\n#Rules:"
+ "\n1.ALWAYS generate columns and values specified in the `Schema`, DO NOT hallucinate." + "\n1.SQL columns and values must be mentioned in the `Schema`, DO NOT hallucinate."
+ "\n2.ALWAYS be cautious, word in the `Schema` does not mean it must appear in the SQL." + "\n2.ALWAYS specify date filter using `>`,`<`,`>=`,`<=` operator."
+ "\n3.ALWAYS specify date filter using `>`,`<`,`>=`,`<=` operator." + "\n3.DO NOT include date filter in the where clause if not explicitly expressed in the `Question`."
+ "\n4.DO NOT include date filter in the where clause if not explicitly expressed in the `Question`." + "\n4.DO NOT calculate date range using functions."
+ "\n5.DO NOT calculate date range using functions." + "\n5.ALWAYS use `with` statement if nested aggregation is needed."
+ "\n6.DO NOT miss the AGGREGATE operator of metrics, always add it as needed." + "\n6.ALWAYS enclose alias declared by `AS` command in underscores."
+ "\n7.ALWAYS use `with` statement if nested aggregation is needed." + "\n7.Alias created by `AS` command must be in the same language as the `Question`."
+ "\n8.ALWAYS enclose alias created by `AS` command in underscores."
+ "\n9.ALWAYS translate alias created by `AS` command to the same language as the `#Question`."
+ "\n#Exemplars: {{exemplar}}" + "\n#Exemplars: {{exemplar}}"
+ "\n#Query: Question:{{question}},Schema:{{schema}},SideInfo:{{information}}"; + "\n#Query: Question:{{question}},Schema:{{schema}},SideInfo:{{information}}";

View File

@@ -21,12 +21,6 @@ public abstract class DetailSemanticQuery extends RuleSemanticQuery {
super(); super();
} }
@Override
public List<SchemaElementMatch> match(List<SchemaElementMatch> candidateElementMatches,
ChatQueryContext queryCtx) {
return super.match(candidateElementMatches, queryCtx);
}
@Override @Override
public void fillParseInfo(ChatQueryContext chatQueryContext, Long dataSetId) { public void fillParseInfo(ChatQueryContext chatQueryContext, Long dataSetId) {
super.fillParseInfo(chatQueryContext, dataSetId); super.fillParseInfo(chatQueryContext, dataSetId);

View File

@@ -25,12 +25,6 @@ public abstract class MetricSemanticQuery extends RuleSemanticQuery {
queryMatcher.addOption(METRIC, REQUIRED, AT_LEAST, 1); queryMatcher.addOption(METRIC, REQUIRED, AT_LEAST, 1);
} }
@Override
public List<SchemaElementMatch> match(List<SchemaElementMatch> candidateElementMatches,
ChatQueryContext queryCtx) {
return super.match(candidateElementMatches, queryCtx);
}
@Override @Override
public void fillParseInfo(ChatQueryContext chatQueryContext, Long dataSetId) { public void fillParseInfo(ChatQueryContext chatQueryContext, Long dataSetId) {
super.fillParseInfo(chatQueryContext, dataSetId); super.fillParseInfo(chatQueryContext, dataSetId);

View File

@@ -4,9 +4,9 @@ import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.core.pojo.Materialization; import com.tencent.supersonic.headless.core.pojo.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.TimeRange; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.TimeRange;
import com.tencent.supersonic.headless.core.translator.calcite.schema.DataSourceTable; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteTable;
import com.tencent.supersonic.headless.core.translator.calcite.schema.DataSourceTable.Builder; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteTable.Builder;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.adapter.enumerable.EnumerableRules; import org.apache.calcite.adapter.enumerable.EnumerableRules;
import org.apache.calcite.config.CalciteConnectionConfigImpl; import org.apache.calcite.config.CalciteConnectionConfigImpl;
@@ -156,14 +156,14 @@ public abstract class AbstractAccelerator implements QueryAccelerator {
String[] dbTable = materialization.getName().split("\\."); String[] dbTable = materialization.getName().split("\\.");
String tb = dbTable[1].toLowerCase(); String tb = dbTable[1].toLowerCase();
String db = dbTable[0].toLowerCase(); String db = dbTable[0].toLowerCase();
Builder builder = DataSourceTable.newBuilder(tb); Builder builder = S2CalciteTable.newBuilder(tb);
for (String f : materialization.getColumns()) { for (String f : materialization.getColumns()) {
builder.addField(f, SqlTypeName.VARCHAR); builder.addField(f, SqlTypeName.VARCHAR);
} }
if (StringUtils.isNotBlank(materialization.getPartitionName())) { if (StringUtils.isNotBlank(materialization.getPartitionName())) {
builder.addField(materialization.getPartitionName(), SqlTypeName.VARCHAR); builder.addField(materialization.getPartitionName(), SqlTypeName.VARCHAR);
} }
DataSourceTable srcTable = builder.withRowCount(1L).build(); S2CalciteTable srcTable = builder.withRowCount(1L).build();
if (Objects.nonNull(db) && !db.isEmpty()) { if (Objects.nonNull(db) && !db.isEmpty()) {
SchemaPlus schemaPlus = dataSetSchema.plus().getSubSchema(db); SchemaPlus schemaPlus = dataSetSchema.plus().getSubSchema(db);
if (Objects.isNull(schemaPlus)) { if (Objects.isNull(schemaPlus)) {

View File

@@ -38,7 +38,7 @@ public class JdbcExecutor implements QueryExecutor {
SqlUtils sqlUtils = ContextUtils.getBean(SqlUtils.class); SqlUtils sqlUtils = ContextUtils.getBean(SqlUtils.class);
String sql = StringUtils.normalizeSpace(queryStatement.getSql()); String sql = StringUtils.normalizeSpace(queryStatement.getSql());
log.info("executing SQL: {}", sql); log.info("executing SQL: {}", sql);
Database database = queryStatement.getSemanticModel().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
SemanticQueryResp queryResultWithColumns = new SemanticQueryResp(); SemanticQueryResp queryResultWithColumns = new SemanticQueryResp();
try { try {
SqlUtils sqlUtil = sqlUtils.init(database); SqlUtils sqlUtil = sqlUtils.init(database);

View File

@@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import lombok.Data; import lombok.Data;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -29,7 +29,7 @@ public class QueryStatement {
private String dataSetAlias; private String dataSetAlias;
private String dataSetSimplifySql; private String dataSetSimplifySql;
private Boolean enableLimitWrapper = false; private Boolean enableLimitWrapper = false;
private SemanticModel semanticModel; private Ontology ontology;
private SemanticSchemaResp semanticSchemaResp; private SemanticSchemaResp semanticSchemaResp;
private Integer limit = 1000; private Integer limit = 1000;
private Boolean isTranslated = false; private Boolean isTranslated = false;

View File

@@ -1,34 +1,72 @@
package com.tencent.supersonic.headless.core.translator; package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.StringUtil; import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MetricTable; import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam; import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter; import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream;
@Component @Component
@Slf4j @Slf4j
public class DefaultSemanticTranslator implements SemanticTranslator { public class DefaultSemanticTranslator implements SemanticTranslator {
@Autowired
private SqlGenerateUtils sqlGenerateUtils;
public void translate(QueryStatement queryStatement) { public void translate(QueryStatement queryStatement) {
if (queryStatement.isTranslated()) {
return;
}
try { try {
preprocess(queryStatement);
parse(queryStatement); parse(queryStatement);
optimize(queryStatement); optimize(queryStatement);
} catch (Exception e) { } catch (Exception e) {
@@ -36,13 +74,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
} }
public void optimize(QueryStatement queryStatement) { private void parse(QueryStatement queryStatement) throws Exception {
for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
queryOptimizer.rewrite(queryStatement);
}
}
public void parse(QueryStatement queryStatement) throws Exception {
QueryParam queryParam = queryStatement.getQueryParam(); QueryParam queryParam = queryStatement.getQueryParam();
if (Objects.isNull(queryStatement.getDataSetQueryParam())) { if (Objects.isNull(queryStatement.getDataSetQueryParam())) {
queryStatement.setDataSetQueryParam(new DataSetQueryParam()); queryStatement.setDataSetQueryParam(new DataSetQueryParam());
@@ -50,6 +82,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
if (Objects.isNull(queryStatement.getMetricQueryParam())) { if (Objects.isNull(queryStatement.getMetricQueryParam())) {
queryStatement.setMetricQueryParam(new MetricQueryParam()); queryStatement.setMetricQueryParam(new MetricQueryParam());
} }
log.debug("SemanticConverter before [{}]", queryParam); log.debug("SemanticConverter before [{}]", queryParam);
for (QueryConverter headlessConverter : ComponentFactory.getQueryConverters()) { for (QueryConverter headlessConverter : ComponentFactory.getQueryConverters()) {
if (headlessConverter.accept(queryStatement)) { if (headlessConverter.accept(queryStatement)) {
@@ -59,13 +92,16 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
log.debug("SemanticConverter after {} {} {}", queryParam, log.debug("SemanticConverter after {} {} {}", queryParam,
queryStatement.getDataSetQueryParam(), queryStatement.getMetricQueryParam()); queryStatement.getDataSetQueryParam(), queryStatement.getMetricQueryParam());
if (!queryStatement.getDataSetQueryParam().getSql().isEmpty()) { if (!queryStatement.getDataSetQueryParam().getSql().isEmpty()) {
doParse(queryStatement.getDataSetQueryParam(), queryStatement); doParse(queryStatement.getDataSetQueryParam(), queryStatement);
} else { } else {
queryStatement.getMetricQueryParam() queryStatement.getMetricQueryParam()
.setNativeQuery(queryParam.getQueryType().isNativeAggQuery()); .setNativeQuery(queryParam.getQueryType().isNativeAggQuery());
doParse(queryStatement); doParse(queryStatement,
AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery()));
} }
if (StringUtils.isEmpty(queryStatement.getSql())) { if (StringUtils.isEmpty(queryStatement.getSql())) {
throw new RuntimeException("parse Exception: " + queryStatement.getErrMsg()); throw new RuntimeException("parse Exception: " + queryStatement.getErrMsg());
} }
@@ -77,11 +113,11 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
} }
public QueryStatement doParse(DataSetQueryParam dataSetQueryParam, private QueryStatement doParse(DataSetQueryParam dataSetQueryParam,
QueryStatement queryStatement) { QueryStatement queryStatement) {
log.info("parse dataSetQuery [{}] ", dataSetQueryParam); log.info("parse dataSetQuery [{}] ", dataSetQueryParam);
SemanticModel semanticModel = queryStatement.getSemanticModel(); Ontology ontology = queryStatement.getOntology();
EngineType engineType = EngineType.fromString(semanticModel.getDatabase().getType()); EngineType engineType = EngineType.fromString(ontology.getDatabase().getType());
try { try {
if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) { if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) {
List<String[]> tables = new ArrayList<>(); List<String[]> tables = new ArrayList<>();
@@ -132,12 +168,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
return queryStatement; return queryStatement;
} }
public QueryStatement doParse(QueryStatement queryStatement) { private QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) {
return doParse(queryStatement,
AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery()));
}
public QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) {
MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam(); MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam();
log.info("parse metricQuery [{}] isAgg [{}]", metricQueryParam, isAgg); log.info("parse metricQuery [{}] isAgg [{}]", metricQueryParam, isAgg);
try { try {
@@ -151,18 +182,19 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
private QueryStatement parserSql(MetricTable metricTable, Boolean isSingleMetricTable, private QueryStatement parserSql(MetricTable metricTable, Boolean isSingleMetricTable,
DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) throws Exception { DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) throws Exception {
MetricQueryParam metricReq = new MetricQueryParam(); MetricQueryParam metricQueryParam = new MetricQueryParam();
metricReq.setMetrics(metricTable.getMetrics()); metricQueryParam.setMetrics(metricTable.getMetrics());
metricReq.setDimensions(metricTable.getDimensions()); metricQueryParam.setDimensions(metricTable.getDimensions());
metricReq.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere())); metricQueryParam.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere()));
metricReq.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption())); metricQueryParam.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption()));
QueryStatement tableSql = new QueryStatement(); QueryStatement tableSql = new QueryStatement();
tableSql.setIsS2SQL(false); tableSql.setIsS2SQL(false);
tableSql.setMetricQueryParam(metricReq); tableSql.setMetricQueryParam(metricQueryParam);
tableSql.setMinMaxTime(queryStatement.getMinMaxTime()); tableSql.setMinMaxTime(queryStatement.getMinMaxTime());
tableSql.setEnableOptimize(queryStatement.getEnableOptimize()); tableSql.setEnableOptimize(queryStatement.getEnableOptimize());
tableSql.setDataSetId(queryStatement.getDataSetId()); tableSql.setDataSetId(queryStatement.getDataSetId());
tableSql.setSemanticModel(queryStatement.getSemanticModel()); tableSql.setOntology(queryStatement.getOntology());
if (isSingleMetricTable) { if (isSingleMetricTable) {
tableSql.setDataSetSql(dataSetQueryParam.getSql()); tableSql.setDataSetSql(dataSetQueryParam.getSql());
tableSql.setDataSetAlias(metricTable.getAlias()); tableSql.setDataSetAlias(metricTable.getAlias());
@@ -174,4 +206,302 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
return tableSql; return tableSql;
} }
private void optimize(QueryStatement queryStatement) {
for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
queryOptimizer.rewrite(queryStatement);
}
}
    /**
     * Rewrites the raw S2SQL on the statement into a physical, executable form and
     * attaches the derived query parameters.
     *
     * <p>The steps are strictly order-dependent: name-to-bizName replacement must run
     * before function rewriting and underscore removal, and the metric table must be
     * built from the already-rewritten SQL. Mutates {@code queryStatement} in place.
     */
    private void preprocess(QueryStatement queryStatement) {
        // nothing to do when there is no SQL to translate
        if (StringUtils.isBlank(queryStatement.getSql())) {
            return;
        }
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        // 1. replace display names with physical biz names and the dataset table name
        convertNameToBizName(queryStatement);
        // 2. adapt function names to the target database dialect
        rewriteFunction(queryStatement);
        // 3. strip underscores so field references match the physical schema
        queryStatement.setSql(SqlRemoveHelper.removeUnderscores(queryStatement.getSql()));
        String tableName = SqlSelectHelper.getTableName(queryStatement.getSql());
        // bail out if no table could be resolved from the rewritten SQL
        if (StringUtils.isEmpty(tableName)) {
            return;
        }
        // correct order item is same as agg alias
        String reqSql = queryStatement.getSql();
        queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(queryStatement.getSql()));
        log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql());
        // 5.build MetricTables
        List<String> allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql());
        List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
        List<String> metrics =
                metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
        Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
        QueryStructReq queryStructReq = new QueryStructReq();
        MetricTable metricTable = new MetricTable();
        metricTable.getMetrics().addAll(metrics);
        metricTable.getDimensions().addAll(dimensions);
        metricTable.setAlias(tableName.toLowerCase());
        // if metric empty , fill model default
        if (CollectionUtils.isEmpty(metricTable.getMetrics())) {
            metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
                    getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
        } else {
            // aggregation operator is resolved later, hence UNKNOWN here
            queryStructReq.getAggregators()
                    .addAll(metricTable.getMetrics().stream()
                            .map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN))
                            .collect(Collectors.toList()));
        }
        AggOption aggOption = getAggOption(queryStatement, metricSchemas);
        metricTable.setAggOption(aggOption);
        List<MetricTable> tables = new ArrayList<>();
        tables.add(metricTable);
        // 6.build ParseSqlReq
        DataSetQueryParam datasetQueryParam = new DataSetQueryParam();
        datasetQueryParam.setTables(tables);
        datasetQueryParam.setSql(queryStatement.getSql());
        DatabaseResp database = semanticSchemaResp.getDatabaseResp();
        // engines that cannot handle WITH clauses fall back to nested sub-selects
        if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()),
                database.getVersion())) {
            datasetQueryParam.setSupportWith(false);
            datasetQueryParam.setWithAlias(false);
        }
        // 7. do deriveMetric
        generateDerivedMetric(semanticSchemaResp, aggOption, datasetQueryParam);
        // 8.physicalSql by ParseSqlReq
        // queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(queryStatement.getSql()));
        queryStructReq.setDataSetId(queryStatement.getDataSetId());
        queryStructReq.setQueryType(getQueryType(aggOption));
        log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq);
        QueryParam queryParam = new QueryParam();
        BeanUtils.copyProperties(queryStructReq, queryParam);
        queryStatement.setQueryParam(queryParam);
        queryStatement.setDataSetQueryParam(datasetQueryParam);
        // queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq));
    }
private AggOption getAggOption(QueryStatement queryStatement,
List<MetricSchemaResp> metricSchemas) {
String sql = queryStatement.getSql();
if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)
&& !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) {
log.debug("getAggOption simple sql set to DEFAULT");
return AggOption.DEFAULT;
}
// if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE"
// if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE"
if (!SqlSelectFunctionHelper.hasAggregateFunction(sql)
|| SqlSelectFunctionHelper.hasFunction(sql, "count")
|| SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
return AggOption.OUTER;
}
// if (queryStatement.isInnerLayerNative()) {
// return AggOption.NATIVE;
// }
if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql)
|| SqlSelectHelper.hasGroupBy(sql)) {
return AggOption.OUTER;
}
long defaultAggNullCnt = metricSchemas.stream().filter(
m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg()))
.count();
if (defaultAggNullCnt > 0) {
log.debug("getAggOption find null defaultAgg metric set to NATIVE");
return AggOption.OUTER;
}
return AggOption.DEFAULT;
}
private void convertNameToBizName(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
String sql = queryStatement.getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceSqlByPositions(sql);
log.debug("replaceSqlByPositions:{}", sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql);
queryStatement.setSql(sql);
}
private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
.collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
SchemaItem::getBizName, (k1, k2) -> k1));
dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(),
TimeDimensionEnum.DAY.getName());
return allFields.stream()
.filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toSet());
}
private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, MetricSchemaResp> metricLowerToNameMap =
semanticSchemaResp.getMetrics().stream().collect(Collectors
.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return allFields.stream()
.filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toList());
}
private void rewriteFunction(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
if (Objects.isNull(database) || Objects.isNull(database.getType())) {
return;
}
String type = database.getType();
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
if (Objects.nonNull(engineAdaptor)) {
String functionNameCorrector =
engineAdaptor.functionNameCorrector(queryStatement.getSql());
queryStatement.setSql(functionNameCorrector);
}
}
    /**
     * Builds a lookup from display names and aliases to physical biz names for all
     * dimensions and metrics of the schema.
     *
     * <p>Override order matters: time-dimension name mappings overwrite dimension
     * entries, and metric entries (added last via putAll) overwrite both.
     */
    protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
        // support fieldName and field alias to bizName
        Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        // built-in time dimension names (Chinese display name and canonical name)
        dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap());
        dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap());
        // metrics take precedence over dimensions on name collision
        dimensionResults.putAll(metricResults);
        return dimensionResults;
    }
private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
String bizName) {
Set<Pair<String, String>> elements = new HashSet<>();
elements.add(Pair.of(name, bizName));
if (StringUtils.isNotBlank(aliasStr)) {
List<String> aliasList = SchemaItem.getAliasList(aliasStr);
for (String alias : aliasList) {
elements.add(Pair.of(alias, bizName));
}
}
return elements.stream();
}
private QueryType getQueryType(AggOption aggOption) {
boolean isAgg = AggOption.isAgg(aggOption);
QueryType queryType = QueryType.DETAIL;
if (isAgg) {
queryType = QueryType.AGGREGATE;
}
return queryType;
}
    /**
     * Expands derived metrics in the dataset query: metrics defined on top of measures
     * are replaced by their measure expressions in both the SQL and the metric tables.
     *
     * <p>Mutates {@code viewQueryParam} and its tables in place; tables whose metrics
     * were expanded are switched to {@link AggOption#NATIVE}.
     */
    private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption,
            DataSetQueryParam viewQueryParam) {
        String sql = viewQueryParam.getSql();
        for (MetricTable metricTable : viewQueryParam.getTables()) {
            // measures collected while expanding this table's derived metrics
            Set<String> measures = new HashSet<>();
            Map<String, String> replaces = generateDerivedMetric(semanticSchemaResp, aggOption,
                    metricTable.getMetrics(), metricTable.getDimensions(), measures);
            if (!CollectionUtils.isEmpty(replaces)) {
                // metricTable sql use measures replace metric
                sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
                metricTable.setAggOption(AggOption.NATIVE);
                // metricTable use measures replace metric
                if (!CollectionUtils.isEmpty(measures)) {
                    metricTable.setMetrics(new ArrayList<>(measures));
                } else {
                    // empty measure , fill default
                    metricTable.setMetrics(new ArrayList<>());
                    metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
                            getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
                }
            }
        }
        viewQueryParam.setSql(sql);
    }
    /**
     * Computes replacement expressions for derived metrics among {@code metrics}.
     *
     * @param semanticSchemaResp schema providing metric/dimension/measure definitions
     * @param aggOption aggregation option passed through to expression generation
     * @param metrics bizNames of the metrics referenced by the query
     * @param dimensions queried dimensions; extended in place with dimensions that the
     *        derived expressions additionally require
     * @param measures out-parameter collecting the measures backing non-derived metrics
     *        and those pulled in by derived expressions
     * @return map from derived-metric bizName to its expanded expression; empty when no
     *         referenced metric is derived
     */
    private Map<String, String> generateDerivedMetric(SemanticSchemaResp semanticSchemaResp,
            AggOption aggOption, List<String> metrics, List<String> dimensions,
            Set<String> measures) {
        Map<String, String> result = new HashMap<>();
        List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
        List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
        // Check if any metric is derived
        boolean hasDerivedMetrics =
                metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
                        .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
        if (!hasDerivedMetrics) {
            return result;
        }
        log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
        // gather all fields and measures across the schema's models
        Set<String> allFields = new HashSet<>();
        Map<String, Measure> allMeasures = new HashMap<>();
        semanticSchemaResp.getModelResps().forEach(modelResp -> {
            allFields.addAll(modelResp.getFieldList());
            if (modelResp.getModelDetail().getMeasures() != null) {
                modelResp.getModelDetail().getMeasures()
                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
            }
        });
        // accumulators filled by the recursive expression generation
        Set<String> derivedDimensions = new HashSet<>();
        Set<String> derivedMetrics = new HashSet<>();
        Map<String, String> visitedMetrics = new HashMap<>();
        for (MetricResp metricResp : metricResps) {
            if (metrics.contains(metricResp.getBizName())) {
                boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
                        metricResp.getMetricDefineByMeasureParams());
                if (isDerived) {
                    String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
                            allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
                            metricResp.getMetricDefineType(), aggOption, visitedMetrics,
                            derivedMetrics, derivedDimensions);
                    result.put(metricResp.getBizName(), expr);
                    log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
                } else {
                    // non-derived metrics are kept as plain measures
                    measures.add(metricResp.getBizName());
                }
            }
        }
        measures.addAll(derivedMetrics);
        // add dimensions required by derived expressions but not yet queried
        derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
                .forEach(dimensions::add);
        return result;
    }
private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List<String> dimensions) {
if (!CollectionUtils.isEmpty(dimensions)) {
Map<String, Long> modelMatchCnt = new HashMap<>();
for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions()
.stream().filter(d -> dimensions.contains(d.getBizName())).count());
}
return modelMatchCnt.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
.map(m -> m.getKey()).findFirst().orElse("");
}
return semanticSchemaResp.getModelResps().get(0).getBizName();
}
} }

View File

@@ -1,22 +1,15 @@
package com.tencent.supersonic.headless.core.translator.calcite; package com.tencent.supersonic.headless.core.translator.calcite;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.QueryParser; import com.tencent.supersonic.headless.core.translator.QueryParser;
import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; import com.tencent.supersonic.headless.core.translator.calcite.sql.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.calcite.schema.RuntimeOptions; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.parser.SqlParseException;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.Collections;
import java.util.Objects;
/** the calcite parse implements */ /** the calcite parse implements */
@Component("CalciteQueryParser") @Component("CalciteQueryParser")
@Slf4j @Slf4j
@@ -24,55 +17,19 @@ public class CalciteQueryParser implements QueryParser {
@Override @Override
public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception { public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception {
MetricQueryParam metricReq = queryStatement.getMetricQueryParam(); Ontology ontology = queryStatement.getOntology();
SemanticModel semanticModel = queryStatement.getSemanticModel(); if (ontology == null) {
if (semanticModel == null) { queryStatement.setErrMsg("No ontology could be found");
queryStatement.setErrMsg("semanticSchema not found");
return; return;
} }
queryStatement.setMetricQueryParam(metricReq);
SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement); S2CalciteSchema semanticSchema = S2CalciteSchema.builder()
AggPlanner aggBuilder = new AggPlanner(semanticSchema); .schemaKey("DATASET_" + queryStatement.getDataSetId()).ontology(ontology)
aggBuilder.explain(queryStatement, isAgg); .runtimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())
EngineType engineType = EngineType.fromString(semanticModel.getDatabase().getType()); .enableOptimize(queryStatement.getEnableOptimize()).build())
queryStatement.setSql(aggBuilder.getSql(engineType)); .build();
if (Objects.nonNull(queryStatement.getEnableOptimize()) SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema);
&& queryStatement.getEnableOptimize() sqlBuilder.build(queryStatement, isAgg);
&& Objects.nonNull(queryStatement.getDataSetAlias())
&& !queryStatement.getDataSetAlias().isEmpty()) {
// simplify model sql with query sql
String simplifySql = aggBuilder.simplify(
getSqlByDataSet(engineType, aggBuilder.getSql(engineType),
queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()),
engineType);
if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) {
log.debug("simplifySql [{}]", simplifySql);
queryStatement.setDataSetSimplifySql(simplifySql);
}
}
} }
private SemanticSchema getSemanticSchema(SemanticModel semanticModel,
QueryStatement queryStatement) {
SemanticSchema semanticSchema =
SemanticSchema.newBuilder(semanticModel.getSchemaKey()).build();
semanticSchema.setSemanticModel(semanticModel);
semanticSchema.setDatasource(semanticModel.getDatasourceMap());
semanticSchema.setDimension(semanticModel.getDimensionMap());
semanticSchema.setMetric(semanticModel.getMetrics());
semanticSchema.setJoinRelations(semanticModel.getJoinRelations());
semanticSchema.setRuntimeOptions(
RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())
.enableOptimize(queryStatement.getEnableOptimize()).build());
return semanticSchema;
}
private String getSqlByDataSet(EngineType engineType, String parentSql, String dataSetSql,
String parentAlias) throws SqlParseException {
if (!SqlMergeWithUtils.hasWith(engineType, dataSetSql)) {
return String.format("with %s as (%s) %s", parentAlias, parentSql, dataSetSql);
}
return SqlMergeWithUtils.mergeWith(engineType, dataSetSql,
Collections.singletonList(parentSql), Collections.singletonList(parentAlias));
}
} }

View File

@@ -1,17 +0,0 @@
package com.tencent.supersonic.headless.core.translator.calcite.planner;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** parse and generate SQL and other execute information */
/** Parses a query statement and generates the physical SQL plus related execute information. */
public interface Planner {

    /** Builds the execution plan for the statement, honoring the aggregation option. */
    void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception;

    /** Returns the generated SQL in the dialect of the given engine. */
    String getSql(EngineType engineType);

    /** Returns the identifier of the source the plan targets. */
    String getSourceId();

    /** Simplifies the given SQL for the target engine. */
    String simplify(String sql, EngineType engineType);
}

View File

@@ -7,13 +7,13 @@ import java.util.List;
@Data @Data
@Builder @Builder
public class DataSource { public class DataModel {
private Long id; private Long id;
private String name; private String name;
private Long sourceId; private Long modelId;
private String type; private String type;

View File

@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql; package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticItem;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql; package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticItem;
import lombok.Data; import lombok.Data;
import java.util.List; import java.util.List;

View File

@@ -11,11 +11,10 @@ import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Data @Data
public class SemanticModel { public class Ontology {
private String schemaKey;
private List<Metric> metrics = new ArrayList<>(); private List<Metric> metrics = new ArrayList<>();
private Map<String, DataSource> datasourceMap = new HashMap<>(); private Map<String, DataModel> dataModelMap = new HashMap<>();
private Map<String, List<Dimension>> dimensionMap = new HashMap<>(); private Map<String, List<Dimension>> dimensionMap = new HashMap<>();
private List<Materialization> materializationList = new ArrayList<>(); private List<Materialization> materializationList = new ArrayList<>();
private List<JoinRelation> joinRelations; private List<JoinRelation> joinRelations;
@@ -26,8 +25,8 @@ public class SemanticModel {
.collect(Collectors.toList()); .collect(Collectors.toList());
} }
public Map<Long, DataSource> getModelMap() { public Map<Long, DataModel> getModelMap() {
return datasourceMap.values().stream() return dataModelMap.values().stream()
.collect(Collectors.toMap(DataSource::getId, dataSource -> dataSource)); .collect(Collectors.toMap(DataModel::getId, dataSource -> dataSource));
} }
} }

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema; package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
public interface SemanticItem { public interface SemanticItem {
String getName();
public String getName(); String getType();
} }

View File

@@ -1,136 +0,0 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.impl.AbstractSchema;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Calcite schema backed by a {@link SemanticModel}; most accessors delegate to the
 * wrapped model. Instances are created through {@link #newBuilder(String)}.
 */
public class SemanticSchema extends AbstractSchema {
    // key used to register this schema in the Calcite root schema
    private final String schemaKey;
    // tables registered via the Builder, exposed to Calcite through getTableMap()
    private final Map<String, Table> tableMap;
    private SemanticModel semanticModel = new SemanticModel();
    private List<JoinRelation> joinRelations;
    // per-query options such as time range and optimization switches
    private RuntimeOptions runtimeOptions;
    private SemanticSchema(String schemaKey, Map<String, Table> tableMap) {
        this.schemaKey = schemaKey;
        this.tableMap = tableMap;
    }
    /** Starts building a schema with the given (non-null) key. */
    public static Builder newBuilder(String schemaKey) {
        return new Builder(schemaKey);
    }
    public String getSchemaKey() {
        return schemaKey;
    }
    public void setSemanticModel(SemanticModel semanticModel) {
        this.semanticModel = semanticModel;
    }
    public SemanticModel getSemanticModel() {
        return semanticModel;
    }
    @Override
    public Map<String, Table> getTableMap() {
        return tableMap;
    }
    @Override
    public Schema snapshot(SchemaVersion version) {
        // the schema does not change during a query, so the snapshot is itself
        return this;
    }
    // ---- delegating accessors over the wrapped SemanticModel ----
    public Map<String, DataSource> getDatasource() {
        return semanticModel.getDatasourceMap();
    }
    public void setDatasource(Map<String, DataSource> datasource) {
        semanticModel.setDatasourceMap(datasource);
    }
    public Map<String, List<Dimension>> getDimension() {
        return semanticModel.getDimensionMap();
    }
    public void setDimension(Map<String, List<Dimension>> dimensions) {
        semanticModel.setDimensionMap(dimensions);
    }
    public List<Metric> getMetrics() {
        return semanticModel.getMetrics();
    }
    public void setMetric(List<Metric> metric) {
        semanticModel.setMetrics(metric);
    }
    public void setMaterializationList(List<Materialization> materializationList) {
        semanticModel.setMaterializationList(materializationList);
    }
    public List<Materialization> getMaterializationList() {
        return semanticModel.getMaterializationList();
    }
    public void setJoinRelations(List<JoinRelation> joinRelations) {
        this.joinRelations = joinRelations;
    }
    public List<JoinRelation> getJoinRelations() {
        return joinRelations;
    }
    public void setRuntimeOptions(RuntimeOptions runtimeOptions) {
        this.runtimeOptions = runtimeOptions;
    }
    public RuntimeOptions getRuntimeOptions() {
        return runtimeOptions;
    }
    /** Collects tables before constructing the immutable schema. */
    public static final class Builder {
        private final String schemaKey;
        private final Map<String, Table> tableMap = new HashMap<>();
        private Builder(String schemaKey) {
            // NOTE(review): message says "null or empty" but only null is rejected;
            // an empty key is currently accepted — confirm whether that is intended
            if (schemaKey == null) {
                throw new IllegalArgumentException("Schema name cannot be null or empty");
            }
            this.schemaKey = schemaKey;
        }
        /** Registers a table; duplicate table names are rejected. */
        public Builder addTable(DataSourceTable table) {
            if (tableMap.containsKey(table.getTableName())) {
                throw new IllegalArgumentException(
                        "Table already defined: " + table.getTableName());
            }
            tableMap.put(table.getTableName(), table);
            return this;
        }
        public SemanticSchema build() {
            return new SemanticSchema(schemaKey, tableMap);
        }
    }
}

View File

@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer; package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelRule; import org.apache.calcite.plan.RelRule;
import org.apache.calcite.rel.core.Aggregate; import org.apache.calcite.rel.core.Aggregate;
@@ -40,23 +39,23 @@ public class FilterToGroupScanRule extends RelRule<Config> implements Transforma
}); });
}).as(FilterTableScanRule.Config.class); }).as(FilterTableScanRule.Config.class);
private SemanticSchema semanticSchema; private S2CalciteSchema schema;
public FilterToGroupScanRule(FilterTableScanRule.Config config, SemanticSchema semanticSchema) { public FilterToGroupScanRule(FilterTableScanRule.Config config, S2CalciteSchema schema) {
super(config); super(config);
this.semanticSchema = semanticSchema; this.schema = schema;
} }
public void onMatch(RelOptRuleCall call) { public void onMatch(RelOptRuleCall call) {
if (call.rels.length != 4) { if (call.rels.length != 4) {
return; return;
} }
if (Objects.isNull(semanticSchema.getRuntimeOptions()) if (Objects.isNull(schema.getRuntimeOptions())
|| Objects.isNull(semanticSchema.getRuntimeOptions().getMinMaxTime()) || Objects.isNull(schema.getRuntimeOptions().getMinMaxTime())
|| semanticSchema.getRuntimeOptions().getMinMaxTime().getLeft().isEmpty()) { || schema.getRuntimeOptions().getMinMaxTime().getLeft().isEmpty()) {
return; return;
} }
Triple<String, String, String> minMax = semanticSchema.getRuntimeOptions().getMinMaxTime(); Triple<String, String, String> minMax = schema.getRuntimeOptions().getMinMaxTime();
Filter filter = (Filter) call.rel(0); Filter filter = (Filter) call.rel(0);
Project project0 = (Project) call.rel(1); Project project0 = (Project) call.rel(1);
Project project1 = (Project) call.rel(3); Project project1 = (Project) call.rel(3);

View File

@@ -1,8 +0,0 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
/** A visitor-style optimization pass applied to a semantic schema before planning. */
public interface Optimization {

    /** Visits the schema, applying this optimization in place. */
    void visit(SemanticSchema semanticSchema);
}

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema; package com.tencent.supersonic.headless.core.translator.calcite.sql;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -0,0 +1,48 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import lombok.Builder;
import lombok.Data;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.impl.AbstractSchema;
import java.util.List;
import java.util.Map;
/**
 * Calcite schema that exposes the semantic {@link Ontology} (data models, metrics,
 * dimensions and join relations) to the Calcite planner.
 */
@Data
@Builder
public class S2CalciteSchema extends AbstractSchema {
    // key used to register this schema in the Calcite root schema
    private String schemaKey;
    // semantic ontology backing this schema
    private Ontology ontology;
    // per-query options (e.g. time range, optimization switches)
    private RuntimeOptions runtimeOptions;
    @Override
    public Schema snapshot(SchemaVersion version) {
        // the schema does not change during a query, so the snapshot is itself
        return this;
    }
    public Map<String, DataModel> getDataModels() {
        return ontology.getDataModelMap();
    }
    public List<Metric> getMetrics() {
        return ontology.getMetrics();
    }
    public Map<String, List<Dimension>> getDimensions() {
        return ontology.getDimensionMap();
    }
    public List<JoinRelation> getJoinRelations() {
        return ontology.getJoinRelations();
    }
}

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema; package com.tencent.supersonic.headless.core.translator.calcite.sql;
import org.apache.calcite.DataContext; import org.apache.calcite.DataContext;
import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerable;
@@ -23,7 +23,7 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
/** customize the AbstractTable */ /** customize the AbstractTable */
public class DataSourceTable extends AbstractTable implements ScannableTable, TranslatableTable { public class S2CalciteTable extends AbstractTable implements ScannableTable, TranslatableTable {
private final String tableName; private final String tableName;
private final List<String> fieldNames; private final List<String> fieldNames;
@@ -32,7 +32,7 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
private RelDataType rowType; private RelDataType rowType;
private DataSourceTable(String tableName, List<String> fieldNames, List<SqlTypeName> fieldTypes, private S2CalciteTable(String tableName, List<String> fieldNames, List<SqlTypeName> fieldTypes,
Statistic statistic) { Statistic statistic) {
this.tableName = tableName; this.tableName = tableName;
this.fieldNames = fieldNames; this.fieldNames = fieldNames;
@@ -116,7 +116,7 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
return this; return this;
} }
public DataSourceTable build() { public S2CalciteTable build() {
if (fieldNames.isEmpty()) { if (fieldNames.isEmpty()) {
throw new IllegalStateException("Table must have at least one field"); throw new IllegalStateException("Table must have at least one field");
} }
@@ -125,7 +125,7 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
throw new IllegalStateException("Table must have positive row count"); throw new IllegalStateException("Table must have positive row count");
} }
return new DataSourceTable(tableName, fieldNames, fieldTypes, return new S2CalciteTable(tableName, fieldNames, fieldTypes,
Statistics.of(rowCount, null)); Statistics.of(rowCount, null));
} }
} }

View File

@@ -1,8 +1,7 @@
package com.tencent.supersonic.headless.core.translator.calcite.schema; package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2SQLSqlValidatorImpl;
import org.apache.calcite.jdbc.CalciteSchema; import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.prepare.CalciteCatalogReader; import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.prepare.Prepare; import org.apache.calcite.prepare.Prepare;
@@ -27,15 +26,14 @@ public class SchemaBuilder {
public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1"; public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1";
public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2"; public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2";
public static SqlValidatorScope getScope(SemanticSchema schema) throws Exception { public static SqlValidatorScope getScope(S2CalciteSchema schema) throws Exception {
Map<String, RelDataType> nameToTypeMap = new HashMap<>(); Map<String, RelDataType> nameToTypeMap = new HashMap<>();
CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false); CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
rootSchema.add(schema.getSchemaKey(), schema); rootSchema.add(schema.getSchemaKey(), schema);
Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema, Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema,
Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory, Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory,
Configuration.config); Configuration.config);
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
S2SQLSqlValidatorImpl s2SQLSqlValidator = S2SQLSqlValidatorImpl s2SQLSqlValidator =
new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader, new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.getValidatorConfig(engineType)); Configuration.typeFactory, Configuration.getValidatorConfig(engineType));
@@ -45,12 +43,12 @@ public class SchemaBuilder {
public static CalciteSchema getMaterializationSchema() { public static CalciteSchema getMaterializationSchema() {
CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false); CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
SchemaPlus schema = rootSchema.plus().add(MATERIALIZATION_SYS_DB, new AbstractSchema()); SchemaPlus schema = rootSchema.plus().add(MATERIALIZATION_SYS_DB, new AbstractSchema());
DataSourceTable srcTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_SOURCE) S2CalciteTable srcTable = S2CalciteTable.newBuilder(MATERIALIZATION_SYS_SOURCE)
.addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE) .addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
.addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT).withRowCount(1) .addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT).withRowCount(1)
.build(); .build();
schema.add(MATERIALIZATION_SYS_SOURCE, srcTable); schema.add(MATERIALIZATION_SYS_SOURCE, srcTable);
DataSourceTable dataSetTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_VIEW) S2CalciteTable dataSetTable = S2CalciteTable.newBuilder(MATERIALIZATION_SYS_VIEW)
.addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE) .addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE)
.addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT).withRowCount(1) .addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT).withRowCount(1)
.build(); .build();
@@ -62,7 +60,7 @@ public class SchemaBuilder {
Set<String> dates, Set<String> dimensions, Set<String> metrics) { Set<String> dates, Set<String> dimensions, Set<String> metrics) {
String tb = tbSrc; String tb = tbSrc;
String db = dbSrc; String db = dbSrc;
DataSourceTable.Builder builder = DataSourceTable.newBuilder(tb); S2CalciteTable.Builder builder = S2CalciteTable.newBuilder(tb);
for (String date : dates) { for (String date : dates) {
builder.addField(date, SqlTypeName.VARCHAR); builder.addField(date, SqlTypeName.VARCHAR);
} }
@@ -72,7 +70,7 @@ public class SchemaBuilder {
for (String metric : metrics) { for (String metric : metrics) {
builder.addField(metric, SqlTypeName.ANY); builder.addField(metric, SqlTypeName.ANY);
} }
DataSourceTable srcTable = builder.withRowCount(1).build(); S2CalciteTable srcTable = builder.withRowCount(1).build();
if (Objects.nonNull(db) && !db.isEmpty()) { if (Objects.nonNull(db) && !db.isEmpty()) {
SchemaPlus dbPs = dataSetSchema.plus(); SchemaPlus dbPs = dataSetSchema.plus();
for (String d : db.split("\\.")) { for (String d : db.split("\\.")) {

View File

@@ -1,60 +1,91 @@
package com.tencent.supersonic.headless.core.translator.calcite.planner; package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.OutputRender; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.OutputRender;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.ListIterator; import java.util.ListIterator;
import java.util.Objects; import java.util.Objects;
import java.util.Stack;
/** parsing from query dimensions and metrics */ /** parsing from query dimensions and metrics */
@Slf4j @Slf4j
public class AggPlanner implements Planner { public class SqlBuilder {
private MetricQueryParam metricReq; private final S2CalciteSchema schema;
private SemanticSchema schema; private MetricQueryParam metricQueryParam;
private SqlValidatorScope scope; private SqlValidatorScope scope;
private Stack<TableView> dataSets = new Stack<>();
private SqlNode parserNode; private SqlNode parserNode;
private String sourceId;
private boolean isAgg = false; private boolean isAgg = false;
private AggOption aggOption = AggOption.DEFAULT; private AggOption aggOption = AggOption.DEFAULT;
public AggPlanner(SemanticSchema schema) { public SqlBuilder(S2CalciteSchema schema) {
this.schema = schema; this.schema = schema;
} }
public void parse() throws Exception { public void build(QueryStatement queryStatement, AggOption aggOption) throws Exception {
this.metricQueryParam = queryStatement.getMetricQueryParam();
if (metricQueryParam.getMetrics() == null) {
metricQueryParam.setMetrics(new ArrayList<>());
}
if (metricQueryParam.getDimensions() == null) {
metricQueryParam.setDimensions(new ArrayList<>());
}
if (metricQueryParam.getLimit() == null) {
metricQueryParam.setLimit(0L);
}
this.aggOption = aggOption;
buildParseNode();
Database database = queryStatement.getOntology().getDatabase();
EngineType engineType = EngineType.fromString(database.getType());
optimizeParseNode(engineType);
String sql = getSql(engineType);
queryStatement.setSql(sql);
if (Objects.nonNull(queryStatement.getEnableOptimize())
&& queryStatement.getEnableOptimize()
&& Objects.nonNull(queryStatement.getDataSetAlias())
&& !queryStatement.getDataSetAlias().isEmpty()) {
// simplify model sql with query sql
String simplifySql = rewrite(getSqlByDataSet(engineType, sql,
queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()), engineType);
if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) {
log.debug("simplifySql [{}]", simplifySql);
queryStatement.setDataSetSimplifySql(simplifySql);
}
}
}
private void buildParseNode() throws Exception {
// find the match Datasource // find the match Datasource
scope = SchemaBuilder.getScope(schema); scope = SchemaBuilder.getScope(schema);
List<DataSource> datasource = getMatchDataSource(scope); List<DataModel> dataModels =
if (datasource == null || datasource.isEmpty()) { DataModelNode.getRelatedDataModels(scope, schema, metricQueryParam);
throw new Exception("datasource not found"); if (dataModels == null || dataModels.isEmpty()) {
throw new Exception("data model not found");
} }
isAgg = getAgg(datasource.get(0)); isAgg = getAgg(dataModels.get(0));
sourceId = String.valueOf(datasource.get(0).getSourceId());
// build level by level // build level by level
LinkedList<Renderer> builders = new LinkedList<>(); LinkedList<Renderer> builders = new LinkedList<>();
@@ -67,84 +98,36 @@ public class AggPlanner implements Planner {
while (it.hasNext()) { while (it.hasNext()) {
Renderer renderer = it.next(); Renderer renderer = it.next();
if (previous != null) { if (previous != null) {
previous.render(metricReq, datasource, scope, schema, !isAgg); previous.render(metricQueryParam, dataModels, scope, schema, !isAgg);
renderer.setTable(previous renderer.setTable(previous
.builderAs(DataSourceNode.getNames(datasource) + "_" + String.valueOf(i))); .builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
i++; i++;
} }
previous = renderer; previous = renderer;
} }
builders.getLast().render(metricReq, datasource, scope, schema, !isAgg); builders.getLast().render(metricQueryParam, dataModels, scope, schema, !isAgg);
parserNode = builders.getLast().builder(); parserNode = builders.getLast().builder();
} }
private List<DataSource> getMatchDataSource(SqlValidatorScope scope) throws Exception { private boolean getAgg(DataModel dataModel) {
return DataSourceNode.getMatchDataSources(scope, schema, metricReq);
}
private boolean getAgg(DataSource dataSource) {
if (!AggOption.DEFAULT.equals(aggOption)) { if (!AggOption.DEFAULT.equals(aggOption)) {
return AggOption.isAgg(aggOption); return AggOption.isAgg(aggOption);
} }
// default by dataSource time aggregation // default by dataModel time aggregation
if (Objects.nonNull(dataSource.getAggTime()) && !dataSource.getAggTime() if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) { .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!metricReq.isNativeQuery()) { if (!metricQueryParam.isNativeQuery()) {
return true; return true;
} }
} }
return isAgg; return isAgg;
} }
@Override
public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception {
this.metricReq = queryStatement.getMetricQueryParam();
if (metricReq.getMetrics() == null) {
metricReq.setMetrics(new ArrayList<>());
}
if (metricReq.getDimensions() == null) {
metricReq.setDimensions(new ArrayList<>());
}
if (metricReq.getLimit() == null) {
metricReq.setLimit(0L);
}
this.aggOption = aggOption;
// build a parse Node
parse();
// optimizer
Database database = queryStatement.getSemanticModel().getDatabase();
EngineType engineType = EngineType.fromString(database.getType());
optimize(engineType);
}
@Override
public String getSql(EngineType engineType) { public String getSql(EngineType engineType) {
return SemanticNode.getSql(parserNode, engineType); return SemanticNode.getSql(parserNode, engineType);
} }
@Override private String rewrite(String sql, EngineType engineType) {
public String getSourceId() {
return sourceId;
}
@Override
public String simplify(String sql, EngineType engineType) {
return optimize(sql, engineType);
}
public void optimize(EngineType engineType) {
if (Objects.isNull(schema.getRuntimeOptions())
|| Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) {
return;
}
SqlNode optimizeNode = optimizeSql(SemanticNode.getSql(parserNode, engineType), engineType);
if (Objects.nonNull(optimizeNode)) {
parserNode = optimizeNode;
}
}
public String optimize(String sql, EngineType engineType) {
try { try {
SqlNode sqlNode = SqlNode sqlNode =
SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt(); SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
@@ -153,21 +136,41 @@ public class AggPlanner implements Planner {
SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType); SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType);
} }
} catch (Exception e) { } catch (Exception e) {
log.error("optimize error {}", e); log.error("optimize error {}", e.toString());
} }
return ""; return "";
} }
private SqlNode optimizeSql(String sql, EngineType engineType) { private void optimizeParseNode(EngineType engineType) {
if (Objects.isNull(schema.getRuntimeOptions())
|| Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) {
return;
}
SqlNode optimizeNode = null;
try { try {
SqlNode sqlNode = SqlNode sqlNode = SqlParser.create(SemanticNode.getSql(parserNode, engineType),
SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt(); Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) { if (Objects.nonNull(sqlNode)) {
return SemanticNode.optimize(scope, schema, sqlNode, engineType); optimizeNode = SemanticNode.optimize(scope, schema, sqlNode, engineType);
} }
} catch (Exception e) { } catch (Exception e) {
log.error("optimize error {}", e); log.error("optimize error {}", e);
} }
return null;
if (Objects.nonNull(optimizeNode)) {
parserNode = optimizeNode;
}
} }
private String getSqlByDataSet(EngineType engineType, String parentSql, String dataSetSql,
String parentAlias) throws SqlParseException {
if (!SqlMergeWithUtils.hasWith(engineType, dataSetSql)) {
return String.format("with %s as (%s) %s", parentAlias, parentSql, dataSetSql);
}
return SqlMergeWithUtils.mergeWith(engineType, dataSetSql,
Collections.singletonList(parentSql), Collections.singletonList(parentAlias));
}
} }

View File

@@ -1,6 +1,6 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql; package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import lombok.Data; import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlKind;
@@ -27,7 +27,7 @@ public class TableView {
private String alias; private String alias;
private List<String> primary; private List<String> primary;
private DataSource dataSource; private DataModel dataModel;
public SqlNode build() { public SqlNode build() {
measure.addAll(dimension); measure.addAll(dimension);

View File

@@ -6,14 +6,13 @@ import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.extend.LateralViewExplodeNode;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlDataTypeSpec; import org.apache.calcite.sql.SqlDataTypeSpec;
@@ -38,30 +37,30 @@ import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Slf4j @Slf4j
public class DataSourceNode extends SemanticNode { public class DataModelNode extends SemanticNode {
public static SqlNode build(DataSource datasource, SqlValidatorScope scope) throws Exception { public static SqlNode build(DataModel dataModel, SqlValidatorScope scope) throws Exception {
String sqlTable = ""; String sqlTable = "";
if (datasource.getSqlQuery() != null && !datasource.getSqlQuery().isEmpty()) { if (dataModel.getSqlQuery() != null && !dataModel.getSqlQuery().isEmpty()) {
sqlTable = datasource.getSqlQuery(); sqlTable = dataModel.getSqlQuery();
} else if (datasource.getTableQuery() != null && !datasource.getTableQuery().isEmpty()) { } else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) {
if (datasource.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) { if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
String fullTableName = Arrays.stream(datasource.getTableQuery().split("\\.")) String fullTableName = Arrays.stream(dataModel.getTableQuery().split("\\."))
.collect(Collectors.joining(".public.")); .collect(Collectors.joining(".public."));
sqlTable = "select * from " + fullTableName; sqlTable = "select * from " + fullTableName;
} else { } else {
sqlTable = "select * from " + datasource.getTableQuery(); sqlTable = "select * from " + dataModel.getTableQuery();
} }
} }
if (sqlTable.isEmpty()) { if (sqlTable.isEmpty()) {
throw new Exception("DatasourceNode build error [tableSqlNode not found]"); throw new Exception("DatasourceNode build error [tableSqlNode not found]");
} }
SqlNode source = getTable(sqlTable, scope, EngineType.fromString(datasource.getType())); SqlNode source = getTable(sqlTable, scope, EngineType.fromString(dataModel.getType()));
addSchema(scope, datasource, sqlTable); addSchema(scope, dataModel, sqlTable);
return buildAs(datasource.getName(), source); return buildAs(dataModel.getName(), source);
} }
private static void addSchema(SqlValidatorScope scope, DataSource datasource, String table) private static void addSchema(SqlValidatorScope scope, DataModel datasource, String table)
throws Exception { throws Exception {
Map<String, Set<String>> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table); Map<String, Set<String>> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table);
for (Map.Entry<String, Set<String>> entry : sqlTable.entrySet()) { for (Map.Entry<String, Set<String>> entry : sqlTable.entrySet()) {
@@ -75,7 +74,7 @@ public class DataSourceNode extends SemanticNode {
} }
} }
private static void addSchemaTable(SqlValidatorScope scope, DataSource datasource, String db, private static void addSchemaTable(SqlValidatorScope scope, DataModel datasource, String db,
String tb, Set<String> fields) throws Exception { String tb, Set<String> fields) throws Exception {
Set<String> dateInfo = new HashSet<>(); Set<String> dateInfo = new HashSet<>();
Set<String> dimensions = new HashSet<>(); Set<String> dimensions = new HashSet<>();
@@ -112,7 +111,7 @@ public class DataSourceNode extends SemanticNode {
dateInfo, dimensions, metrics); dateInfo, dimensions, metrics);
} }
public static SqlNode buildExtend(DataSource datasource, Map<String, String> exprList, public static SqlNode buildExtend(DataModel datasource, Map<String, String> exprList,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
if (CollectionUtils.isEmpty(exprList)) { if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope); return build(datasource, scope);
@@ -146,11 +145,11 @@ public class DataSourceNode extends SemanticNode {
return sqlNode; return sqlNode;
} }
public static String getNames(List<DataSource> dataSourceList) { public static String getNames(List<DataModel> dataModelList) {
return dataSourceList.stream().map(d -> d.getName()).collect(Collectors.joining("_")); return dataModelList.stream().map(d -> d.getName()).collect(Collectors.joining("_"));
} }
public static void getQueryDimensionMeasure(SemanticSchema schema, public static void getQueryDimensionMeasure(S2CalciteSchema schema,
MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures) { MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures) {
queryDimension.addAll(metricCommand.getDimensions().stream() queryDimension.addAll(metricCommand.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY) .map(d -> d.contains(Constants.DIMENSION_IDENTIFY)
@@ -166,11 +165,10 @@ public class DataSourceNode extends SemanticNode {
.forEach(m -> measures.add(m)); .forEach(m -> measures.add(m));
} }
public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema,
MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures, MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>(); Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType), FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType),
@@ -193,18 +191,18 @@ public class DataSourceNode extends SemanticNode {
} }
} }
public static List<DataSource> getMatchDataSources(SqlValidatorScope scope, public static List<DataModel> getRelatedDataModels(SqlValidatorScope scope,
SemanticSchema schema, MetricQueryParam metricCommand) throws Exception { S2CalciteSchema schema, MetricQueryParam metricCommand) throws Exception {
List<DataSource> dataSources = new ArrayList<>(); List<DataModel> dataModels = new ArrayList<>();
// check by metric // check by metric
List<String> measures = new ArrayList<>(); List<String> measures = new ArrayList<>();
Set<String> queryDimension = new HashSet<>(); Set<String> queryDimension = new HashSet<>();
getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures); getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures);
DataSource baseDataSource = null; DataModel baseDataModel = null;
// one , match measure count // one , match measure count
Map<String, Integer> dataSourceMeasures = new HashMap<>(); Map<String, Integer> dataSourceMeasures = new HashMap<>();
for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) { for (Map.Entry<String, DataModel> entry : schema.getDataModels().entrySet()) {
Set<String> sourceMeasure = entry.getValue().getMeasures().stream() Set<String> sourceMeasure = entry.getValue().getMeasures().stream()
.map(mm -> mm.getName()).collect(Collectors.toSet()); .map(mm -> mm.getName()).collect(Collectors.toSet());
sourceMeasure.retainAll(measures); sourceMeasure.retainAll(measures);
@@ -214,58 +212,58 @@ public class DataSourceNode extends SemanticNode {
Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream() Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
if (base.isPresent()) { if (base.isPresent()) {
baseDataSource = schema.getDatasource().get(base.get().getKey()); baseDataModel = schema.getDataModels().get(base.get().getKey());
dataSources.add(baseDataSource); dataModels.add(baseDataModel);
} }
// second , check match all dimension and metric // second , check match all dimension and metric
if (baseDataSource != null) { if (baseDataModel != null) {
Set<String> filterMeasure = new HashSet<>(); Set<String> filterMeasure = new HashSet<>();
Set<String> sourceMeasure = baseDataSource.getMeasures().stream() Set<String> sourceMeasure = baseDataModel.getMeasures().stream().map(mm -> mm.getName())
.map(mm -> mm.getName()).collect(Collectors.toSet());
Set<String> dimension = baseDataSource.getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet()); .collect(Collectors.toSet());
baseDataSource.getIdentifiers().stream().forEach(i -> dimension.add(i.getName())); Set<String> dimension = baseDataModel.getDimensions().stream().map(dd -> dd.getName())
if (schema.getDimension().containsKey(baseDataSource.getName())) { .collect(Collectors.toSet());
schema.getDimension().get(baseDataSource.getName()).stream() baseDataModel.getIdentifiers().stream().forEach(i -> dimension.add(i.getName()));
if (schema.getDimensions().containsKey(baseDataModel.getName())) {
schema.getDimensions().get(baseDataModel.getName()).stream()
.forEach(d -> dimension.add(d.getName())); .forEach(d -> dimension.add(d.getName()));
} }
filterMeasure.addAll(sourceMeasure); filterMeasure.addAll(sourceMeasure);
filterMeasure.addAll(dimension); filterMeasure.addAll(dimension);
EngineType engineType = EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); EngineType.fromString(schema.getOntology().getDatabase().getType());
mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures,
scope); scope);
boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension,
metricCommand, scope, engineType); metricCommand, scope, engineType);
if (isAllMatch) { if (isAllMatch) {
log.debug("baseDataSource match all "); log.debug("baseDataModel match all ");
return dataSources; return dataModels;
} }
// find all dataSource has the same identifiers // find all dataSource has the same identifiers
List<DataSource> linkDataSources = getLinkDataSourcesByJoinRelation(queryDimension, List<DataModel> linkDataModels = getLinkDataSourcesByJoinRelation(queryDimension,
measures, baseDataSource, schema); measures, baseDataModel, schema);
if (CollectionUtils.isEmpty(linkDataSources)) { if (CollectionUtils.isEmpty(linkDataModels)) {
log.debug("baseDataSource get by identifiers "); log.debug("baseDataModel get by identifiers ");
Set<String> baseIdentifiers = baseDataSource.getIdentifiers().stream() Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream()
.map(i -> i.getName()).collect(Collectors.toSet()); .map(i -> i.getName()).collect(Collectors.toSet());
if (baseIdentifiers.isEmpty()) { if (baseIdentifiers.isEmpty()) {
throw new Exception( throw new Exception(
"datasource error : " + baseDataSource.getName() + " miss identifier"); "datasource error : " + baseDataModel.getName() + " miss identifier");
} }
linkDataSources = getLinkDataSources(baseIdentifiers, queryDimension, measures, linkDataModels = getLinkDataSources(baseIdentifiers, queryDimension, measures,
baseDataSource, schema); baseDataModel, schema);
if (linkDataSources.isEmpty()) { if (linkDataModels.isEmpty()) {
throw new Exception(String.format( throw new Exception(String.format(
"not find the match datasource : dimension[%s],measure[%s]", "not find the match datasource : dimension[%s],measure[%s]",
queryDimension, measures)); queryDimension, measures));
} }
} }
log.debug("linkDataSources {}", linkDataSources); log.debug("linkDataModels {}", linkDataModels);
return linkDataSources; return linkDataModels;
// dataSources.addAll(linkDataSources); // dataModels.addAll(linkDataModels);
} }
return dataSources; return dataModels;
} }
private static boolean checkMatch(Set<String> sourceMeasure, Set<String> queryDimension, private static boolean checkMatch(Set<String> sourceMeasure, Set<String> queryDimension,
@@ -301,17 +299,17 @@ public class DataSourceNode extends SemanticNode {
return isAllMatch; return isAllMatch;
} }
private static List<DataSource> getLinkDataSourcesByJoinRelation(Set<String> queryDimension, private static List<DataModel> getLinkDataSourcesByJoinRelation(Set<String> queryDimension,
List<String> measures, DataSource baseDataSource, SemanticSchema schema) { List<String> measures, DataModel baseDataModel, S2CalciteSchema schema) {
Set<String> linkDataSourceName = new HashSet<>(); Set<String> linkDataSourceName = new HashSet<>();
List<DataSource> linkDataSources = new ArrayList<>(); List<DataModel> linkDataModels = new ArrayList<>();
Set<String> before = new HashSet<>(); Set<String> before = new HashSet<>();
before.add(baseDataSource.getName()); before.add(baseDataModel.getName());
if (!CollectionUtils.isEmpty(schema.getJoinRelations())) { if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
Set<Long> visitJoinRelations = new HashSet<>(); Set<Long> visitJoinRelations = new HashSet<>();
List<JoinRelation> sortedJoinRelation = new ArrayList<>(); List<JoinRelation> sortedJoinRelation = new ArrayList<>();
sortJoinRelation(schema.getJoinRelations(), baseDataSource.getName(), sortJoinRelation(schema.getJoinRelations(), baseDataModel.getName(), visitJoinRelations,
visitJoinRelations, sortedJoinRelation); sortedJoinRelation);
schema.getJoinRelations().stream().filter(j -> !visitJoinRelations.contains(j.getId())) schema.getJoinRelations().stream().filter(j -> !visitJoinRelations.contains(j.getId()))
.forEach(j -> sortedJoinRelation.add(j)); .forEach(j -> sortedJoinRelation.add(j));
for (JoinRelation joinRelation : sortedJoinRelation) { for (JoinRelation joinRelation : sortedJoinRelation) {
@@ -321,8 +319,8 @@ public class DataSourceNode extends SemanticNode {
} }
boolean isMatch = false; boolean isMatch = false;
boolean isRight = before.contains(joinRelation.getLeft()); boolean isRight = before.contains(joinRelation.getLeft());
DataSource other = isRight ? schema.getDatasource().get(joinRelation.getRight()) DataModel other = isRight ? schema.getDataModels().get(joinRelation.getRight())
: schema.getDatasource().get(joinRelation.getLeft()); : schema.getDataModels().get(joinRelation.getLeft());
if (!queryDimension.isEmpty()) { if (!queryDimension.isEmpty()) {
Set<String> linkDimension = other.getDimensions().stream() Set<String> linkDimension = other.getDimensions().stream()
.map(dd -> dd.getName()).collect(Collectors.toSet()); .map(dd -> dd.getName()).collect(Collectors.toSet());
@@ -338,8 +336,8 @@ public class DataSourceNode extends SemanticNode {
if (!linkMeasure.isEmpty()) { if (!linkMeasure.isEmpty()) {
isMatch = true; isMatch = true;
} }
if (!isMatch && schema.getDimension().containsKey(other.getName())) { if (!isMatch && schema.getDimensions().containsKey(other.getName())) {
Set<String> linkDimension = schema.getDimension().get(other.getName()).stream() Set<String> linkDimension = schema.getDimensions().get(other.getName()).stream()
.map(dd -> dd.getName()).collect(Collectors.toSet()); .map(dd -> dd.getName()).collect(Collectors.toSet());
linkDimension.retainAll(queryDimension); linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
@@ -354,8 +352,8 @@ public class DataSourceNode extends SemanticNode {
} }
if (!CollectionUtils.isEmpty(linkDataSourceName)) { if (!CollectionUtils.isEmpty(linkDataSourceName)) {
Map<String, Long> orders = new HashMap<>(); Map<String, Long> orders = new HashMap<>();
linkDataSourceName.add(baseDataSource.getName()); linkDataSourceName.add(baseDataModel.getName());
orders.put(baseDataSource.getName(), 0L); orders.put(baseDataModel.getName(), 0L);
for (JoinRelation joinRelation : schema.getJoinRelations()) { for (JoinRelation joinRelation : schema.getJoinRelations()) {
if (linkDataSourceName.contains(joinRelation.getLeft()) if (linkDataSourceName.contains(joinRelation.getLeft())
&& linkDataSourceName.contains(joinRelation.getRight())) { && linkDataSourceName.contains(joinRelation.getRight())) {
@@ -364,10 +362,10 @@ public class DataSourceNode extends SemanticNode {
} }
} }
orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> { orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> {
linkDataSources.add(schema.getDatasource().get(d.getKey())); linkDataModels.add(schema.getDataModels().get(d.getKey()));
}); });
} }
return linkDataSources; return linkDataModels;
} }
private static void sortJoinRelation(List<JoinRelation> joinRelations, String next, private static void sortJoinRelation(List<JoinRelation> joinRelations, String next,
@@ -385,13 +383,13 @@ public class DataSourceNode extends SemanticNode {
} }
} }
private static List<DataSource> getLinkDataSources(Set<String> baseIdentifiers, private static List<DataModel> getLinkDataSources(Set<String> baseIdentifiers,
Set<String> queryDimension, List<String> measures, DataSource baseDataSource, Set<String> queryDimension, List<String> measures, DataModel baseDataModel,
SemanticSchema schema) { S2CalciteSchema schema) {
Set<String> linkDataSourceName = new HashSet<>(); Set<String> linkDataSourceName = new HashSet<>();
List<DataSource> linkDataSources = new ArrayList<>(); List<DataModel> linkDataModels = new ArrayList<>();
for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) { for (Map.Entry<String, DataModel> entry : schema.getDataModels().entrySet()) {
if (entry.getKey().equalsIgnoreCase(baseDataSource.getName())) { if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) {
continue; continue;
} }
Long identifierNum = entry.getValue().getIdentifiers().stream().map(i -> i.getName()) Long identifierNum = entry.getValue().getIdentifiers().stream().map(i -> i.getName())
@@ -421,7 +419,7 @@ public class DataSourceNode extends SemanticNode {
} }
} }
} }
for (Map.Entry<String, List<Dimension>> entry : schema.getDimension().entrySet()) { for (Map.Entry<String, List<Dimension>> entry : schema.getDimensions().entrySet()) {
if (!queryDimension.isEmpty()) { if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().stream().map(dd -> dd.getName()) Set<String> linkDimension = entry.getValue().stream().map(dd -> dd.getName())
.collect(Collectors.toSet()); .collect(Collectors.toSet());
@@ -432,12 +430,12 @@ public class DataSourceNode extends SemanticNode {
} }
} }
for (String linkName : linkDataSourceName) { for (String linkName : linkDataSourceName) {
linkDataSources.add(schema.getDatasource().get(linkName)); linkDataModels.add(schema.getDataModels().get(linkName));
} }
if (!CollectionUtils.isEmpty(linkDataSources)) { if (!CollectionUtils.isEmpty(linkDataModels)) {
List<DataSource> all = new ArrayList<>(); List<DataModel> all = new ArrayList<>();
all.add(baseDataSource); all.add(baseDataModel);
all.addAll(linkDataSources); all.addAll(linkDataModels);
return all; return all;
} }
return Lists.newArrayList(); return Lists.newArrayList();

View File

@@ -1,13 +0,0 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
@Data
public class JoinNode extends SemanticNode {
private SqlNode join;
private SqlNode on;
private SqlNode left;
private SqlNode right;
}

View File

@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node.extend; package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.ExtendNode;
import org.apache.calcite.linq4j.Ord; import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlIdentifier;

View File

@@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import lombok.Data; import lombok.Data;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -30,7 +30,7 @@ public class MetricNode extends SemanticNode {
return buildAs(metric.getName(), sqlNode); return buildAs(metric.getName(), sqlNode);
} }
public static Boolean isMetricField(String name, SemanticSchema schema) { public static Boolean isMetricField(String name, S2CalciteSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream() Optional<Metric> metric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name)).findFirst(); .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
return metric.isPresent() && metric.get().getMetricTypeParams().isFieldMetric(); return metric.isPresent() && metric.get().getMetricTypeParams().isFieldMetric();

View File

@@ -5,8 +5,8 @@ import com.tencent.supersonic.common.calcite.SemanticSqlDialect;
import com.tencent.supersonic.common.calcite.SqlDialectFactory; import com.tencent.supersonic.common.calcite.SqlDialectFactory;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer.FilterToGroupScanRule; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepPlanner;
@@ -397,7 +397,7 @@ public abstract class SemanticNode {
return parseInfo; return parseInfo;
} }
public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode, public static SqlNode optimize(SqlValidatorScope scope, S2CalciteSchema schema, SqlNode sqlNode,
EngineType engineType) { EngineType engineType) {
try { try {
HepProgramBuilder hepProgramBuilder = new HepProgramBuilder(); HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();

View File

@@ -3,10 +3,9 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
@@ -27,14 +26,13 @@ import java.util.stream.Collectors;
public class FilterRender extends Renderer { public class FilterRender extends Renderer {
@Override @Override
public void render(MetricQueryParam metricCommand, List<DataSource> dataSources, public void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView tableView = super.tableView; TableView tableView = super.tableView;
SqlNode filterNode = null; SqlNode filterNode = null;
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics()); List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions()); List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) { if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType); filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
@@ -43,9 +41,9 @@ public class FilterRender extends Renderer {
List<String> fieldWhere = whereFields.stream().collect(Collectors.toList()); List<String> fieldWhere = whereFields.stream().collect(Collectors.toList());
Set<String> dimensions = new HashSet<>(); Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>(); Set<String> metrics = new HashSet<>();
for (DataSource dataSource : dataSources) { for (DataModel dataModel : dataModels) {
SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(), SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(),
metricCommand.getDimensions(), dataSource, schema, dimensions, metrics); metricCommand.getDimensions(), dataModel, schema, dimensions, metrics);
} }
queryMetrics.addAll(metrics); queryMetrics.addAll(metrics);
queryDimensions.addAll(dimensions); queryDimensions.addAll(dimensions);

View File

@@ -3,17 +3,16 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
@@ -48,11 +47,10 @@ import java.util.stream.Collectors;
public class JoinRender extends Renderer { public class JoinRender extends Renderer {
@Override @Override
public void render(MetricQueryParam metricCommand, List<DataSource> dataSources, public void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere(); String queryWhere = metricCommand.getWhere();
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>(); List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) { if (queryWhere != null && !queryWhere.isEmpty()) {
@@ -62,7 +60,7 @@ public class JoinRender extends Renderer {
} }
Set<String> queryAllDimension = new HashSet<>(); Set<String> queryAllDimension = new HashSet<>();
List<String> measures = new ArrayList<>(); List<String> measures = new ArrayList<>();
DataSourceNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures); DataModelNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures);
SqlNode left = null; SqlNode left = null;
TableView leftTable = null; TableView leftTable = null;
TableView innerView = new TableView(); TableView innerView = new TableView();
@@ -71,14 +69,14 @@ public class JoinRender extends Renderer {
Set<String> filterDimension = new HashSet<>(); Set<String> filterDimension = new HashSet<>();
Map<String, String> beforeSources = new HashMap<>(); Map<String, String> beforeSources = new HashMap<>();
for (int i = 0; i < dataSources.size(); i++) { for (int i = 0; i < dataModels.size(); i++) {
final DataSource dataSource = dataSources.get(i); final DataModel dataModel = dataModels.get(i);
final Set<String> filterDimensions = new HashSet<>(); final Set<String> filterDimensions = new HashSet<>();
final Set<String> filterMetrics = new HashSet<>(); final Set<String> filterMetrics = new HashSet<>();
final List<String> queryDimension = new ArrayList<>(); final List<String> queryDimension = new ArrayList<>();
final List<String> queryMetrics = new ArrayList<>(); final List<String> queryMetrics = new ArrayList<>();
SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataSource, SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema,
schema, filterDimensions, filterMetrics); filterDimensions, filterMetrics);
List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics()); List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
reqMetric.addAll(filterMetrics); reqMetric.addAll(filterMetrics);
reqMetric = uniqList(reqMetric); reqMetric = uniqList(reqMetric);
@@ -87,40 +85,40 @@ public class JoinRender extends Renderer {
reqDimension.addAll(filterDimensions); reqDimension.addAll(filterDimensions);
reqDimension = uniqList(reqDimension); reqDimension = uniqList(reqDimension);
Set<String> sourceMeasure = dataSource.getMeasures().stream().map(mm -> mm.getName()) Set<String> sourceMeasure = dataModel.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet()); .collect(Collectors.toSet());
doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataSource, sourceMeasure, doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataModel, sourceMeasure,
scope, schema, nonAgg); scope, schema, nonAgg);
Set<String> dimension = dataSource.getDimensions().stream().map(dd -> dd.getName()) Set<String> dimension = dataModel.getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet()); .collect(Collectors.toSet());
doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataSource, doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataModel,
dimension, scope, schema); dimension, scope, schema);
List<String> primary = new ArrayList<>(); List<String> primary = new ArrayList<>();
for (Identify identify : dataSource.getIdentifiers()) { for (Identify identify : dataModel.getIdentifiers()) {
primary.add(identify.getName()); primary.add(identify.getName());
if (!fieldWhere.contains(identify.getName())) { if (!fieldWhere.contains(identify.getName())) {
fieldWhere.add(identify.getName()); fieldWhere.add(identify.getName());
} }
} }
List<String> dataSourceWhere = new ArrayList<>(fieldWhere); List<String> dataSourceWhere = new ArrayList<>(fieldWhere);
addZipperField(dataSource, dataSourceWhere); addZipperField(dataModel, dataSourceWhere);
TableView tableView = TableView tableView =
SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension, SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension,
metricCommand.getWhere(), dataSources.get(i), scope, schema, true); metricCommand.getWhere(), dataModels.get(i), scope, schema, true);
log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString())); log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
tableView.setAlias(alias); tableView.setAlias(alias);
tableView.setPrimary(primary); tableView.setPrimary(primary);
tableView.setDataSource(dataSource); tableView.setDataModel(dataModel);
if (left == null) { if (left == null) {
leftTable = tableView; leftTable = tableView;
left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)); left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope));
beforeSources.put(dataSource.getName(), leftTable.getAlias()); beforeSources.put(dataModel.getName(), leftTable.getAlias());
continue; continue;
} }
left = buildJoin(left, leftTable, tableView, beforeSources, dataSource, schema, scope); left = buildJoin(left, leftTable, tableView, beforeSources, dataModel, schema, scope);
leftTable = tableView; leftTable = tableView;
beforeSources.put(dataSource.getName(), tableView.getAlias()); beforeSources.put(dataModel.getName(), tableView.getAlias());
} }
for (Map.Entry<String, SqlNode> entry : innerSelect.entrySet()) { for (Map.Entry<String, SqlNode> entry : innerSelect.entrySet()) {
@@ -144,16 +142,14 @@ public class JoinRender extends Renderer {
} }
private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView, private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView,
List<String> queryMetrics, List<String> reqMetrics, DataSource dataSource, List<String> queryMetrics, List<String> reqMetrics, DataModel dataModel,
Set<String> sourceMeasure, SqlValidatorScope scope, SemanticSchema schema, Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema,
boolean nonAgg) throws Exception { boolean nonAgg) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String m : reqMetrics) { for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) { if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias);
buildMetricNode(m, dataSource, scope, schema, nonAgg, alias);
if (!metricNode.getNonAggNode().isEmpty()) { if (!metricNode.getNonAggNode().isEmpty()) {
for (String measure : metricNode.getNonAggNode().keySet()) { for (String measure : metricNode.getNonAggNode().keySet()) {
@@ -181,14 +177,13 @@ public class JoinRender extends Renderer {
} }
private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension, private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension,
List<String> queryDimension, List<String> reqDimensions, DataSource dataSource, List<String> queryDimension, List<String> reqDimensions, DataModel dataModel,
Set<String> dimension, SqlValidatorScope scope, SemanticSchema schema) Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema)
throws Exception { throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String d : reqDimensions) { for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) { if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) {
String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY); String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode
@@ -209,7 +204,7 @@ public class JoinRender extends Renderer {
.collect(Collectors.toSet()); .collect(Collectors.toSet());
} }
private boolean getMatchMetric(SemanticSchema schema, Set<String> sourceMeasure, String m, private boolean getMatchMetric(S2CalciteSchema schema, Set<String> sourceMeasure, String m,
List<String> queryMetrics) { List<String> queryMetrics) {
Optional<Metric> metric = schema.getMetrics().stream() Optional<Metric> metric = schema.getMetrics().stream()
.filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst(); .filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst();
@@ -230,8 +225,8 @@ public class JoinRender extends Renderer {
return isAdd; return isAdd;
} }
private boolean getMatchDimension(SemanticSchema schema, Set<String> sourceDimension, private boolean getMatchDimension(S2CalciteSchema schema, Set<String> sourceDimension,
DataSource dataSource, String d, List<String> queryDimension) { DataModel dataModel, String d, List<String> queryDimension) {
String oriDimension = d; String oriDimension = d;
boolean isAdd = false; boolean isAdd = false;
if (d.contains(Constants.DIMENSION_IDENTIFY)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) {
@@ -240,14 +235,14 @@ public class JoinRender extends Renderer {
if (sourceDimension.contains(oriDimension)) { if (sourceDimension.contains(oriDimension)) {
isAdd = true; isAdd = true;
} }
for (Identify identify : dataSource.getIdentifiers()) { for (Identify identify : dataModel.getIdentifiers()) {
if (identify.getName().equalsIgnoreCase(oriDimension)) { if (identify.getName().equalsIgnoreCase(oriDimension)) {
isAdd = true; isAdd = true;
break; break;
} }
} }
if (schema.getDimension().containsKey(dataSource.getName())) { if (schema.getDimensions().containsKey(dataModel.getName())) {
for (Dimension dim : schema.getDimension().get(dataSource.getName())) { for (Dimension dim : schema.getDimensions().get(dataModel.getName())) {
if (dim.getName().equalsIgnoreCase(oriDimension)) { if (dim.getName().equalsIgnoreCase(oriDimension)) {
isAdd = true; isAdd = true;
} }
@@ -264,12 +259,11 @@ public class JoinRender extends Renderer {
} }
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView,
Map<String, String> before, DataSource dataSource, SemanticSchema schema, Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
SqlNode condition = SqlNode condition =
getCondition(leftTable, tableView, dataSource, schema, scope, engineType); getCondition(leftTable, tableView, dataModel, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral(""); SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema); JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema);
SqlNode joinRelationCondition = null; SqlNode joinRelationCondition = null;
@@ -278,11 +272,11 @@ public class JoinRender extends Renderer {
joinRelationCondition = getCondition(matchJoinRelation, scope, engineType); joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
condition = joinRelationCondition; condition = joinRelationCondition;
} }
if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataSource().getTimePartType()) if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataModel().getTimePartType())
|| Materialization.TimePartType.ZIPPER || Materialization.TimePartType.ZIPPER
.equals(tableView.getDataSource().getTimePartType())) { .equals(tableView.getDataModel().getTimePartType())) {
SqlNode zipperCondition = SqlNode zipperCondition =
getZipperCondition(leftTable, tableView, dataSource, schema, scope); getZipperCondition(leftTable, tableView, dataModel, schema, scope);
if (Objects.nonNull(joinRelationCondition)) { if (Objects.nonNull(joinRelationCondition)) {
condition = new SqlBasicCall(SqlStdOperatorTable.AND, condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)), new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)),
@@ -299,11 +293,11 @@ public class JoinRender extends Renderer {
} }
private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView, private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView,
SemanticSchema schema) { S2CalciteSchema schema) {
JoinRelation matchJoinRelation = JoinRelation.builder().build(); JoinRelation matchJoinRelation = JoinRelation.builder().build();
if (!CollectionUtils.isEmpty(schema.getJoinRelations())) { if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
for (JoinRelation joinRelation : schema.getJoinRelations()) { for (JoinRelation joinRelation : schema.getJoinRelations()) {
if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataSource().getName()) if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getLeft())) { && before.containsKey(joinRelation.getLeft())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream() matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of( .map(r -> Triple.of(
@@ -338,8 +332,8 @@ public class JoinRender extends Renderer {
return condition; return condition;
} }
private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, private SqlNode getCondition(TableView left, TableView right, DataModel dataModel,
SemanticSchema schema, SqlValidatorScope scope, EngineType engineType) S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType)
throws Exception { throws Exception {
Set<String> selectLeft = SemanticNode.getSelect(left.getTable()); Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
@@ -347,16 +341,16 @@ public class JoinRender extends Renderer {
selectLeft.retainAll(selectRight); selectLeft.retainAll(selectRight);
SqlNode condition = null; SqlNode condition = null;
for (String on : selectLeft) { for (String on : selectLeft) {
if (!SourceRender.isDimension(on, dataSource, schema)) { if (!SourceRender.isDimension(on, dataModel, schema)) {
continue; continue;
} }
if (IdentifyNode.isForeign(on, left.getDataSource().getIdentifiers())) { if (IdentifyNode.isForeign(on, left.getDataModel().getIdentifiers())) {
if (!IdentifyNode.isPrimary(on, right.getDataSource().getIdentifiers())) { if (!IdentifyNode.isPrimary(on, right.getDataModel().getIdentifiers())) {
continue; continue;
} }
} }
if (IdentifyNode.isForeign(on, right.getDataSource().getIdentifiers())) { if (IdentifyNode.isForeign(on, right.getDataModel().getIdentifiers())) {
if (!IdentifyNode.isPrimary(on, left.getDataSource().getIdentifiers())) { if (!IdentifyNode.isPrimary(on, left.getDataModel().getIdentifiers())) {
continue; continue;
} }
} }
@@ -396,9 +390,9 @@ public class JoinRender extends Renderer {
visited.put(id, false); visited.put(id, false);
} }
private void addZipperField(DataSource dataSource, List<String> fields) { private void addZipperField(DataModel dataModel, List<String> fields) {
if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) { if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
dataSource.getDimensions().stream() dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.forEach(t -> { .forEach(t -> {
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END) if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)
@@ -413,18 +407,18 @@ public class JoinRender extends Renderer {
} }
} }
private SqlNode getZipperCondition(TableView left, TableView right, DataSource dataSource, private SqlNode getZipperCondition(TableView left, TableView right, DataModel dataModel,
SemanticSchema schema, SqlValidatorScope scope) throws Exception { S2CalciteSchema schema, SqlValidatorScope scope) throws Exception {
if (Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType()) if (Materialization.TimePartType.ZIPPER.equals(left.getDataModel().getTimePartType())
&& Materialization.TimePartType.ZIPPER && Materialization.TimePartType.ZIPPER
.equals(right.getDataSource().getTimePartType())) { .equals(right.getDataModel().getTimePartType())) {
throw new Exception("not support two zipper table"); throw new Exception("not support two zipper table");
} }
SqlNode condition = null; SqlNode condition = null;
Optional<Dimension> leftTime = left.getDataSource().getDimensions().stream() Optional<Dimension> leftTime = left.getDataModel().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst(); .findFirst();
Optional<Dimension> rightTime = right.getDataSource().getDimensions().stream() Optional<Dimension> rightTime = right.getDataModel().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst(); .findFirst();
if (leftTime.isPresent() && rightTime.isPresent()) { if (leftTime.isPresent() && rightTime.isPresent()) {
@@ -434,7 +428,7 @@ public class JoinRender extends Renderer {
String dateTime = ""; String dateTime = "";
Optional<Dimension> startTimeOp = (Materialization.TimePartType.ZIPPER Optional<Dimension> startTimeOp = (Materialization.TimePartType.ZIPPER
.equals(left.getDataSource().getTimePartType()) ? left : right).getDataSource() .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
.getDimensions().stream() .getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME .filter(d -> Constants.DIMENSION_TYPE_TIME
.equalsIgnoreCase(d.getType())) .equalsIgnoreCase(d.getType()))
@@ -442,7 +436,7 @@ public class JoinRender extends Renderer {
.startsWith(Constants.MATERIALIZATION_ZIPPER_START)) .startsWith(Constants.MATERIALIZATION_ZIPPER_START))
.findFirst(); .findFirst();
Optional<Dimension> endTimeOp = (Materialization.TimePartType.ZIPPER Optional<Dimension> endTimeOp = (Materialization.TimePartType.ZIPPER
.equals(left.getDataSource().getTimePartType()) ? left : right).getDataSource() .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
.getDimensions().stream() .getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME .filter(d -> Constants.DIMENSION_TYPE_TIME
.equalsIgnoreCase(d.getType())) .equalsIgnoreCase(d.getType()))
@@ -451,17 +445,17 @@ public class JoinRender extends Renderer {
.findFirst(); .findFirst();
if (startTimeOp.isPresent() && endTimeOp.isPresent()) { if (startTimeOp.isPresent() && endTimeOp.isPresent()) {
TableView zipper = Materialization.TimePartType.ZIPPER TableView zipper = Materialization.TimePartType.ZIPPER
.equals(left.getDataSource().getTimePartType()) ? left : right; .equals(left.getDataModel().getTimePartType()) ? left : right;
TableView partMetric = Materialization.TimePartType.ZIPPER TableView partMetric = Materialization.TimePartType.ZIPPER
.equals(left.getDataSource().getTimePartType()) ? right : left; .equals(left.getDataModel().getTimePartType()) ? right : left;
Optional<Dimension> partTime = Materialization.TimePartType.ZIPPER Optional<Dimension> partTime = Materialization.TimePartType.ZIPPER
.equals(left.getDataSource().getTimePartType()) ? rightTime : leftTime; .equals(left.getDataModel().getTimePartType()) ? rightTime : leftTime;
startTime = zipper.getAlias() + "." + startTimeOp.get().getName(); startTime = zipper.getAlias() + "." + startTimeOp.get().getName();
endTime = zipper.getAlias() + "." + endTimeOp.get().getName(); endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName(); dateTime = partMetric.getAlias() + "." + partTime.get().getName();
} }
EngineType engineType = EngineType engineType =
EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); EngineType.fromString(schema.getOntology().getDatabase().getType());
ArrayList<SqlNode> operandList = ArrayList<SqlNode> operandList =
new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType), new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))); SemanticNode.parse(dateTime, scope, engineType)));

View File

@@ -3,9 +3,8 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
@@ -23,11 +22,10 @@ import java.util.List;
public class OutputRender extends Renderer { public class OutputRender extends Renderer {
@Override @Override
public void render(MetricQueryParam metricCommand, List<DataSource> dataSources, public void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView; TableView selectDataSet = super.tableView;
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String dimension : metricCommand.getDimensions()) { for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType)); selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
} }

View File

@@ -1,13 +1,14 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql; package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
@@ -27,29 +28,29 @@ public abstract class Renderer {
protected TableView tableView = new TableView(); protected TableView tableView = new TableView();
public static Optional<Dimension> getDimensionByName(String name, DataSource datasource) { public static Optional<Dimension> getDimensionByName(String name, DataModel datasource) {
return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name)) return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name))
.findFirst(); .findFirst();
} }
public static Optional<Measure> getMeasureByName(String name, DataSource datasource) { public static Optional<Measure> getMeasureByName(String name, DataModel datasource) {
return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name)) return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name))
.findFirst(); .findFirst();
} }
public static Optional<Metric> getMetricByName(String name, SemanticSchema schema) { public static Optional<Metric> getMetricByName(String name, S2CalciteSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream() Optional<Metric> metric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name)).findFirst(); .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
return metric; return metric;
} }
public static Optional<Identify> getIdentifyByName(String name, DataSource datasource) { public static Optional<Identify> getIdentifyByName(String name, DataModel datasource) {
return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name)) return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst(); .findFirst();
} }
public static MetricNode buildMetricNode(String metric, DataSource datasource, public static MetricNode buildMetricNode(String metric, DataModel datasource,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg, String alias) SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg, String alias)
throws Exception { throws Exception {
Optional<Metric> metricOpt = getMetricByName(metric, schema); Optional<Metric> metricOpt = getMetricByName(metric, schema);
MetricNode metricNode = new MetricNode(); MetricNode metricNode = new MetricNode();
@@ -113,6 +114,6 @@ public abstract class Renderer {
return SemanticNode.buildAs(alias, tableView.build()); return SemanticNode.buildAs(alias, tableView.build());
} }
public abstract void render(MetricQueryParam metricCommand, List<DataSource> dataSources, public abstract void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception; SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
} }

View File

@@ -3,16 +3,15 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DimensionNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DimensionNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode;
@@ -21,7 +20,6 @@ import com.tencent.supersonic.headless.core.translator.calcite.sql.node.Semantic
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.util.Litmus;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.ArrayList;
@@ -44,7 +42,7 @@ public class SourceRender extends Renderer {
public static TableView renderOne(String alias, List<String> fieldWheres, public static TableView renderOne(String alias, List<String> fieldWheres,
List<String> reqMetrics, List<String> reqDimensions, String queryWhere, List<String> reqMetrics, List<String> reqDimensions, String queryWhere,
DataSource datasource, SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
throws Exception { throws Exception {
TableView dataSet = new TableView(); TableView dataSet = new TableView();
@@ -97,7 +95,7 @@ public class SourceRender extends Renderer {
output.setMeasure(SemanticNode.deduplicateNode(output.getMeasure())); output.setMeasure(SemanticNode.deduplicateNode(output.getMeasure()));
dataSet.setMeasure(SemanticNode.deduplicateNode(dataSet.getMeasure())); dataSet.setMeasure(SemanticNode.deduplicateNode(dataSet.getMeasure()));
SqlNode tableNode = DataSourceNode.buildExtend(datasource, extendFields, scope); SqlNode tableNode = DataModelNode.buildExtend(datasource, extendFields, scope);
dataSet.setTable(tableNode); dataSet.setTable(tableNode);
output.setTable( output.setTable(
SemanticNode.buildAs(Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName() SemanticNode.buildAs(Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName()
@@ -107,12 +105,11 @@ public class SourceRender extends Renderer {
private static void buildDimension(String alias, String dimension, DataSource datasource, private static void buildDimension(String alias, String dimension, DataModel datasource,
SemanticSchema schema, boolean nonAgg, Map<String, String> extendFields, S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields,
TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception { TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
List<Dimension> dimensionList = schema.getDimension().get(datasource.getName()); List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
boolean isAdd = false; boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) { if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) { for (Dimension dim : dimensionList) {
@@ -186,12 +183,11 @@ public class SourceRender extends Renderer {
} }
private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics, private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, Map<String, String> extendFields, DataSource datasource, List<String> queryDimensions, Map<String, String> extendFields, DataModel datasource,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator(); Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>(); List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
while (iterator.hasNext()) { while (iterator.hasNext()) {
String cur = iterator.next(); String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) { if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
@@ -199,7 +195,7 @@ public class SourceRender extends Renderer {
} }
} }
for (String where : fields) { for (String where : fields) {
List<Dimension> dimensionList = schema.getDimension().get(datasource.getName()); List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
boolean isAdd = false; boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) { if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) { for (Dimension dim : dimensionList) {
@@ -229,8 +225,8 @@ public class SourceRender extends Renderer {
private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet, private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
List<String> queryMetrics, List<String> queryDimensions, List<String> queryMetrics, List<String> queryDimensions,
Map<String, String> extendFields, DataSource datasource, SqlValidatorScope scope, Map<String, String> extendFields, DataModel datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception { S2CalciteSchema schema, boolean nonAgg) throws Exception {
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions,
extendFields, datasource, scope, schema, nonAgg); extendFields, datasource, scope, schema, nonAgg);
dataSet.getMeasure().addAll(whereNode); dataSet.getMeasure().addAll(whereNode);
@@ -238,7 +234,7 @@ public class SourceRender extends Renderer {
} }
public static void whereDimMetric(List<String> fields, List<String> queryMetrics, public static void whereDimMetric(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, DataSource datasource, SemanticSchema schema, List<String> queryDimensions, DataModel datasource, S2CalciteSchema schema,
Set<String> dimensions, Set<String> metrics) { Set<String> dimensions, Set<String> metrics) {
for (String field : fields) { for (String field : fields) {
if (queryDimensions.contains(field) || queryMetrics.contains(field)) { if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
@@ -252,8 +248,8 @@ public class SourceRender extends Renderer {
} }
} }
private static void addField(String field, String oriField, DataSource datasource, private static void addField(String field, String oriField, DataModel datasource,
SemanticSchema schema, Set<String> dimensions, Set<String> metrics) { S2CalciteSchema schema, Set<String> dimensions, Set<String> metrics) {
Optional<Dimension> dimension = datasource.getDimensions().stream() Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(field)).findFirst(); .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
if (dimension.isPresent()) { if (dimension.isPresent()) {
@@ -266,8 +262,8 @@ public class SourceRender extends Renderer {
dimensions.add(oriField); dimensions.add(oriField);
return; return;
} }
if (schema.getDimension().containsKey(datasource.getName())) { if (schema.getDimensions().containsKey(datasource.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimension().get(datasource.getName()) Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(field)).findFirst(); .stream().filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
if (dataSourceDim.isPresent()) { if (dataSourceDim.isPresent()) {
dimensions.add(oriField); dimensions.add(oriField);
@@ -293,7 +289,7 @@ public class SourceRender extends Renderer {
} }
} }
public static boolean isDimension(String name, DataSource datasource, SemanticSchema schema) { public static boolean isDimension(String name, DataModel datasource, S2CalciteSchema schema) {
Optional<Dimension> dimension = datasource.getDimensions().stream() Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name)).findFirst(); .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dimension.isPresent()) { if (dimension.isPresent()) {
@@ -304,8 +300,8 @@ public class SourceRender extends Renderer {
if (identify.isPresent()) { if (identify.isPresent()) {
return true; return true;
} }
if (schema.getDimension().containsKey(datasource.getName())) { if (schema.getDimensions().containsKey(datasource.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimension().get(datasource.getName()) Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst(); .stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dataSourceDim.isPresent()) { if (dataSourceDim.isPresent()) {
return true; return true;
@@ -314,13 +310,13 @@ public class SourceRender extends Renderer {
return false; return false;
} }
private static void addTimeDimension(DataSource dataSource, List<String> queryDimension) { private static void addTimeDimension(DataModel dataModel, List<String> queryDimension) {
if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) { if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
Optional<Dimension> startTimeOp = dataSource.getDimensions().stream() Optional<Dimension> startTimeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)) .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START))
.findFirst(); .findFirst();
Optional<Dimension> endTimeOp = dataSource.getDimensions().stream() Optional<Dimension> endTimeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)) .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END))
.findFirst(); .findFirst();
@@ -331,7 +327,7 @@ public class SourceRender extends Renderer {
queryDimension.add(endTimeOp.get().getName()); queryDimension.add(endTimeOp.get().getName());
} }
} else { } else {
Optional<Dimension> timeOp = dataSource.getDimensions().stream() Optional<Dimension> timeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst(); .findFirst();
if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) { if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) {
@@ -340,27 +336,26 @@ public class SourceRender extends Renderer {
} }
} }
public void render(MetricQueryParam metricQueryParam, List<DataSource> dataSources, public void render(MetricQueryParam metricQueryParam, List<DataModel> dataModels,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricQueryParam.getWhere(); String queryWhere = metricQueryParam.getWhere();
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>(); List<String> fieldWhere = new ArrayList<>();
EngineType engineType = EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (queryWhere != null && !queryWhere.isEmpty()) { if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType); SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields); FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList()); fieldWhere = whereFields.stream().collect(Collectors.toList());
} }
if (dataSources.size() == 1) { if (dataModels.size() == 1) {
DataSource dataSource = dataSources.get(0); DataModel dataModel = dataModels.get(0);
super.tableView = renderOne("", fieldWhere, metricQueryParam.getMetrics(), super.tableView = renderOne("", fieldWhere, metricQueryParam.getMetrics(),
metricQueryParam.getDimensions(), metricQueryParam.getWhere(), dataSource, metricQueryParam.getDimensions(), metricQueryParam.getWhere(), dataModel, scope,
scope, schema, nonAgg); schema, nonAgg);
return; return;
} }
JoinRender joinRender = new JoinRender(); JoinRender joinRender = new JoinRender();
joinRender.render(metricQueryParam, dataSources, scope, schema, nonAgg); joinRender.render(metricQueryParam, dataModels, scope, schema, nonAgg);
super.tableView = joinRender.getTableView(); super.tableView = joinRender.getTableView();
} }
} }

View File

@@ -97,7 +97,7 @@ public class CalculateAggConverter implements QueryConverter {
@Override @Override
public void convert(QueryStatement queryStatement) throws Exception { public void convert(QueryStatement queryStatement) throws Exception {
Database database = queryStatement.getSemanticModel().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement, DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement,
EngineType.fromString(database.getType().toUpperCase()), database.getVersion()); EngineType.fromString(database.getType().toUpperCase()), database.getVersion());
queryStatement.setDataSetQueryParam(dataSetQueryParam); queryStatement.setDataSetQueryParam(dataSetQueryParam);

View File

@@ -34,7 +34,7 @@ public class DefaultDimValueConverter implements QueryConverter {
@Override @Override
public void convert(QueryStatement queryStatement) { public void convert(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getSemanticModel().getDimensions().stream() List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues())) .filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList()); .collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) { if (CollectionUtils.isEmpty(dimensions)) {

View File

@@ -5,7 +5,7 @@ import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
@@ -60,7 +60,7 @@ public class ParserDefaultConverter implements QueryConverter {
// support detail query // support detail query
if (queryParam.getQueryType().isNativeAggQuery() if (queryParam.getQueryType().isNativeAggQuery()
&& CollectionUtils.isEmpty(metricQueryParam.getMetrics())) { && CollectionUtils.isEmpty(metricQueryParam.getMetrics())) {
Map<Long, DataSource> modelMap = queryStatement.getSemanticModel().getModelMap(); Map<Long, DataModel> modelMap = queryStatement.getOntology().getModelMap();
for (Long modelId : modelMap.keySet()) { for (Long modelId : modelMap.keySet()) {
String modelBizName = modelMap.get(modelId).getName(); String modelBizName = modelMap.get(modelId).getName();
String internalMetricName = String internalMetricName =

View File

@@ -4,7 +4,7 @@ import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils; import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -39,9 +39,9 @@ public class SqlVariableParseConverter implements QueryConverter {
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(), modelResp.getModelDetail().getSqlVariables(),
queryStatement.getQueryParam().getParams()); queryStatement.getQueryParam().getParams());
DataSource dataSource = queryStatement.getSemanticModel().getDatasourceMap() DataModel dataModel =
.get(modelResp.getBizName()); queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
dataSource.setSqlQuery(sqlParsed); dataModel.setSqlQuery(sqlParsed);
} }
} }
} }

View File

@@ -260,9 +260,8 @@ public class S2DataPermissionAspect {
} }
public void checkModelVisible(User user, Set<Long> modelIds) { public void checkModelVisible(User user, Set<Long> modelIds) {
List<Long> modelListVisible = List<Long> modelListVisible = modelService.getModelListWithAuth(user, null, AuthType.VIEWER)
modelService.getModelListWithAuth(user, null, AuthType.VISIBLE).stream() .stream().map(ModelResp::getId).collect(Collectors.toList());
.map(ModelResp::getId).collect(Collectors.toList());
List<Long> modelIdCopied = new ArrayList<>(modelIds); List<Long> modelIdCopied = new ArrayList<>(modelIds);
modelIdCopied.removeAll(modelListVisible); modelIdCopied.removeAll(modelListVisible);
if (!CollectionUtils.isEmpty(modelIdCopied)) { if (!CollectionUtils.isEmpty(modelIdCopied)) {

View File

@@ -34,7 +34,6 @@ import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator; import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission; import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService; import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
@@ -44,13 +43,13 @@ import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService; import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.SchemaService; import com.tencent.supersonic.headless.server.service.SchemaService;
import com.tencent.supersonic.headless.server.utils.MetricDrillDownChecker; import com.tencent.supersonic.headless.server.utils.MetricDrillDownChecker;
import com.tencent.supersonic.headless.server.utils.QueryReqConverter;
import com.tencent.supersonic.headless.server.utils.QueryUtils; import com.tencent.supersonic.headless.server.utils.QueryUtils;
import com.tencent.supersonic.headless.server.utils.StatUtils; import com.tencent.supersonic.headless.server.utils.StatUtils;
import lombok.SneakyThrows; import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.ArrayList; import java.util.ArrayList;
@@ -67,7 +66,6 @@ public class S2SemanticLayerService implements SemanticLayerService {
private final StatUtils statUtils; private final StatUtils statUtils;
private final QueryUtils queryUtils; private final QueryUtils queryUtils;
private final QueryReqConverter queryReqConverter;
private final SemanticSchemaManager semanticSchemaManager; private final SemanticSchemaManager semanticSchemaManager;
private final DataSetService dataSetService; private final DataSetService dataSetService;
private final SchemaService schemaService; private final SchemaService schemaService;
@@ -80,14 +78,13 @@ public class S2SemanticLayerService implements SemanticLayerService {
private final List<QueryExecutor> queryExecutors = ComponentFactory.getQueryExecutors(); private final List<QueryExecutor> queryExecutors = ComponentFactory.getQueryExecutors();
public S2SemanticLayerService(StatUtils statUtils, QueryUtils queryUtils, public S2SemanticLayerService(StatUtils statUtils, QueryUtils queryUtils,
QueryReqConverter queryReqConverter, SemanticSchemaManager semanticSchemaManager, SemanticSchemaManager semanticSchemaManager, DataSetService dataSetService,
DataSetService dataSetService, SchemaService schemaService, SchemaService schemaService, SemanticTranslator semanticTranslator,
SemanticTranslator semanticTranslator, MetricDrillDownChecker metricDrillDownChecker, MetricDrillDownChecker metricDrillDownChecker,
KnowledgeBaseService knowledgeBaseService, MetricService metricService, KnowledgeBaseService knowledgeBaseService, MetricService metricService,
DimensionService dimensionService) { DimensionService dimensionService) {
this.statUtils = statUtils; this.statUtils = statUtils;
this.queryUtils = queryUtils; this.queryUtils = queryUtils;
this.queryReqConverter = queryReqConverter;
this.semanticSchemaManager = semanticSchemaManager; this.semanticSchemaManager = semanticSchemaManager;
this.dataSetService = dataSetService; this.dataSetService = dataSetService;
this.schemaService = schemaService; this.schemaService = schemaService;
@@ -122,7 +119,6 @@ public class S2SemanticLayerService implements SemanticLayerService {
statUtils.initStatInfo(queryReq, user); statUtils.initStatInfo(queryReq, user);
// 2.query from cache // 2.query from cache
String cacheKey = queryCache.getCacheKey(queryReq); String cacheKey = queryCache.getCacheKey(queryReq);
Object query = queryCache.query(queryReq, cacheKey); Object query = queryCache.query(queryReq, cacheKey);
if (Objects.nonNull(query)) { if (Objects.nonNull(query)) {
@@ -136,16 +132,16 @@ public class S2SemanticLayerService implements SemanticLayerService {
} }
StatUtils.get().setUseResultCache(false); StatUtils.get().setUseResultCache(false);
// 3 query // 3 translate query
QueryStatement queryStatement = buildQueryStatement(queryReq, user); QueryStatement queryStatement = buildQueryStatement(queryReq, user);
semanticTranslator.translate(queryStatement);
// Check whether the dimensions of the metric drill-down are correct temporarily,
// add the abstraction of a validator later.
metricDrillDownChecker.checkQuery(queryStatement);
// 4.execute query
SemanticQueryResp queryResp = null; SemanticQueryResp queryResp = null;
// skip translation if already done.
if (!queryStatement.isTranslated()) {
semanticTranslator.translate(queryStatement);
}
queryPreCheck(queryStatement);
for (QueryExecutor queryExecutor : queryExecutors) { for (QueryExecutor queryExecutor : queryExecutors) {
if (queryExecutor.accept(queryStatement)) { if (queryExecutor.accept(queryStatement)) {
queryResp = queryExecutor.execute(queryStatement); queryResp = queryExecutor.execute(queryStatement);
@@ -154,7 +150,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
} }
} }
// 4 reset cache and set stateInfo // 5.reset cache and set stateInfo
Boolean setCacheSuccess = queryCache.put(cacheKey, queryResp); Boolean setCacheSuccess = queryCache.put(cacheKey, queryResp);
if (setCacheSuccess) { if (setCacheSuccess) {
// if result is not null, update cache data // if result is not null, update cache data
@@ -185,7 +181,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
List<String> dimensionValues = getDimensionValuesFromDict(dimensionValueReq, dataSetIds); List<String> dimensionValues = getDimensionValuesFromDict(dimensionValueReq, dataSetIds);
// If the search results are null, search dimensionValue from the database // try to query dimensionValue from the database.
if (CollectionUtils.isEmpty(dimensionValues)) { if (CollectionUtils.isEmpty(dimensionValues)) {
return getDimensionValuesFromDb(dimensionValueReq, user); return getDimensionValuesFromDb(dimensionValueReq, user);
} }
@@ -218,9 +214,29 @@ public class S2SemanticLayerService implements SemanticLayerService {
.map(MapResult::getName).collect(Collectors.toList()); .map(MapResult::getName).collect(Collectors.toList());
} }
private SemanticQueryResp getDimensionValuesFromDb(DimensionValueReq dimensionValueReq, private SemanticQueryResp getDimensionValuesFromDb(DimensionValueReq queryDimValueReq,
User user) { User user) {
QuerySqlReq querySqlReq = buildQuerySqlReq(dimensionValueReq); QuerySqlReq querySqlReq = new QuerySqlReq();
List<ModelResp> modelResps =
schemaService.getModelList(Lists.newArrayList(queryDimValueReq.getModelId()));
DimensionResp dimensionResp = schemaService.getDimension(queryDimValueReq.getBizName(),
queryDimValueReq.getModelId());
ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(),
modelResp.getName());
List<Dim> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql,
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(),
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getEndDate());
}
if (StringUtils.isNotBlank(queryDimValueReq.getValue())) {
sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%"
+ queryDimValueReq.getValue() + "%'";
}
querySqlReq.setModelIds(Sets.newHashSet(queryDimValueReq.getModelId()));
querySqlReq.setSql(sql);
return queryByReq(querySqlReq, user); return queryByReq(querySqlReq, user);
} }
@@ -271,35 +287,16 @@ public class S2SemanticLayerService implements SemanticLayerService {
return metricService.getMetrics(metaFilter); return metricService.getMetrics(metaFilter);
} }
private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user) {
throws Exception {
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}
SchemaFilterReq filter = buildSchemaFilterReq(querySqlReq);
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter);
QueryStatement queryStatement = queryReqConverter.convert(querySqlReq, semanticSchemaResp);
queryStatement.setModelIds(querySqlReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp));
return queryStatement;
}
private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user)
throws Exception {
QueryStatement queryStatement = null; QueryStatement queryStatement = null;
if (semanticQueryReq instanceof QuerySqlReq) { if (semanticQueryReq instanceof QuerySqlReq) {
queryStatement = buildSqlQueryStatement((QuerySqlReq) semanticQueryReq, user); queryStatement = buildSqlQueryStatement((QuerySqlReq) semanticQueryReq, user);
} }
if (semanticQueryReq instanceof QueryStructReq) { if (semanticQueryReq instanceof QueryStructReq) {
queryStatement = buildStructQueryStatement((QueryStructReq) semanticQueryReq); queryStatement = buildStructQueryStatement(semanticQueryReq);
} }
if (semanticQueryReq instanceof QueryMultiStructReq) { if (semanticQueryReq instanceof QueryMultiStructReq) {
queryStatement = queryStatement = buildMultiStructQueryStatement((QueryMultiStructReq) semanticQueryReq);
buildMultiStructQueryStatement((QueryMultiStructReq) semanticQueryReq, user);
} }
if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo()) if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo())
&& StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) { && StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) {
@@ -310,72 +307,46 @@ public class S2SemanticLayerService implements SemanticLayerService {
return queryStatement; return queryStatement;
} }
private QueryStatement buildStructQueryStatement(QueryStructReq queryStructReq) { private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) {
SchemaFilterReq filter = buildSchemaFilterReq(queryStructReq); // If dataSetId or DataSetName is empty, parse dataSetId from the SQL
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter); if (querySqlReq.needGetDataSetId()) {
QueryStatement queryStatement = new QueryStatement(); Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
QueryParam queryParam = new QueryParam(); querySqlReq.setDataSetId(dataSetId);
queryReqConverter.convert(queryStructReq, queryParam); }
queryStatement.setQueryParam(queryParam);
queryStatement.setIsS2SQL(false); QueryStatement queryStatement = buildStructQueryStatement(querySqlReq);
queryStatement.setEnableOptimize(queryUtils.enableOptimize()); queryStatement.setIsS2SQL(true);
queryStatement.setDataSetId(queryStructReq.getDataSetId()); queryStatement.setSql(querySqlReq.getSql());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp));
return queryStatement; return queryStatement;
} }
private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq, private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
User user) throws Exception {
List<QueryStatement> sqlParsers = new ArrayList<>();
for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) {
QueryStatement queryStatement = buildQueryStatement(queryStructReq, user);
SemanticModel semanticModel = queryStatement.getSemanticModel();
queryStatement.setModelIds(queryStructReq.getModelIds());
queryStatement.setSemanticModel(semanticModel);
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
semanticTranslator.translate(queryStatement);
sqlParsers.add(queryStatement);
}
log.info("multi sqlParser:{}", sqlParsers);
return queryUtils.sqlParserUnion(queryMultiStructReq, sqlParsers);
}
private SchemaFilterReq buildSchemaFilterReq(SemanticQueryReq semanticQueryReq) {
SchemaFilterReq schemaFilterReq = new SchemaFilterReq(); SchemaFilterReq schemaFilterReq = new SchemaFilterReq();
schemaFilterReq.setDataSetId(semanticQueryReq.getDataSetId()); schemaFilterReq.setDataSetId(queryReq.getDataSetId());
schemaFilterReq.setModelIds(semanticQueryReq.getModelIds()); schemaFilterReq.setModelIds(queryReq.getModelIds());
return schemaFilterReq; SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(schemaFilterReq);
QueryStatement queryStatement = new QueryStatement();
QueryParam queryParam = new QueryParam();
BeanUtils.copyProperties(queryReq, queryParam);
queryStatement.setQueryParam(queryParam);
queryStatement.setModelIds(queryReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp));
return queryStatement;
} }
private QuerySqlReq buildQuerySqlReq(DimensionValueReq queryDimValueReq) { private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq) {
QuerySqlReq querySqlReq = new QuerySqlReq(); List<QueryStatement> queryStatements = new ArrayList<>();
List<ModelResp> modelResps = for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) {
schemaService.getModelList(Lists.newArrayList(queryDimValueReq.getModelId())); QueryStatement queryStatement = buildStructQueryStatement(queryStructReq);
DimensionResp dimensionResp = schemaService.getDimension(queryDimValueReq.getBizName(), semanticTranslator.translate(queryStatement);
queryDimValueReq.getModelId()); queryStatements.add(queryStatement);
ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(),
modelResp.getName());
List<Dim> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql,
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(),
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getEndDate());
} }
if (StringUtils.isNotBlank(queryDimValueReq.getValue())) { log.info("Union multiple query statements:{}", queryStatements);
sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%" return queryUtils.unionAll(queryMultiStructReq, queryStatements);
+ queryDimValueReq.getValue() + "%'";
}
querySqlReq.setModelIds(Sets.newHashSet(queryDimValueReq.getModelId()));
querySqlReq.setSql(sql);
return querySqlReq;
}
private void queryPreCheck(QueryStatement queryStatement) {
// Check whether the dimensions of the metric drill-down are correct temporarily,
// add the abstraction of a validator later.
metricDrillDownChecker.checkQuery(queryStatement);
} }
} }

View File

@@ -8,7 +8,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.TagResp; import com.tencent.supersonic.headless.api.pojo.response.TagResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataType; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DimensionTimeTypeParams; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DimensionTimeTypeParams;
@@ -18,8 +18,8 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materializa
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
@@ -57,9 +57,8 @@ public class SemanticSchemaManager {
this.schemaService = schemaService; this.schemaService = schemaService;
} }
public SemanticModel getSemanticModel(SemanticSchemaResp semanticSchemaResp) { public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) {
SemanticModel semanticModel = new SemanticModel(); Ontology ontology = new Ontology();
semanticModel.setSchemaKey(semanticSchemaResp.getSchemaKey());
Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>(); Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>(); List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>();
List<MetricYamlTpl> metricYamlTpls = new ArrayList<>(); List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
@@ -67,36 +66,35 @@ public class SemanticSchemaManager {
schemaService.getSchemaYamlTpl(semanticSchemaResp, dimensionYamlTpls, dataModelYamlTpls, schemaService.getSchemaYamlTpl(semanticSchemaResp, dimensionYamlTpls, dataModelYamlTpls,
metricYamlTpls, modelIdName); metricYamlTpls, modelIdName);
DatabaseResp databaseResp = semanticSchemaResp.getDatabaseResp(); DatabaseResp databaseResp = semanticSchemaResp.getDatabaseResp();
semanticModel.setDatabase(DatabaseConverter.convert(databaseResp)); ontology.setDatabase(DatabaseConverter.convert(databaseResp));
if (!CollectionUtils.isEmpty(semanticSchemaResp.getModelRelas())) { if (!CollectionUtils.isEmpty(semanticSchemaResp.getModelRelas())) {
semanticModel.setJoinRelations( ontology.setJoinRelations(
getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName)); getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName));
} }
if (!dataModelYamlTpls.isEmpty()) { if (!dataModelYamlTpls.isEmpty()) {
Map<String, DataSource> dataSourceMap = Map<String, DataModel> dataModelMap =
dataModelYamlTpls.stream().map(SemanticSchemaManager::getDatasource).collect( dataModelYamlTpls.stream().map(SemanticSchemaManager::getDataModel).collect(
Collectors.toMap(DataSource::getName, item -> item, (k1, k2) -> k1)); Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1));
semanticModel.setDatasourceMap(dataSourceMap); ontology.setDataModelMap(dataModelMap);
} }
if (!dimensionYamlTpls.isEmpty()) { if (!dimensionYamlTpls.isEmpty()) {
Map<String, List<Dimension>> dimensionMap = new HashMap<>(); Map<String, List<Dimension>> dimensionMap = new HashMap<>();
for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) { for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
dimensionMap.put(entry.getKey(), getDimensions(entry.getValue())); dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
} }
semanticModel.setDimensionMap(dimensionMap); ontology.setDimensionMap(dimensionMap);
} }
if (!metricYamlTpls.isEmpty()) { if (!metricYamlTpls.isEmpty()) {
semanticModel.setMetrics(getMetrics(metricYamlTpls)); ontology.setMetrics(getMetrics(metricYamlTpls));
} }
return semanticModel; return ontology;
} }
public SemanticModel getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) public Ontology getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) throws Exception {
throws Exception {
if (CollectionUtils.isEmpty(semanticSchemaResp.getTags())) { if (CollectionUtils.isEmpty(semanticSchemaResp.getTags())) {
throw new Exception("semanticSchemaResp tag is empty"); throw new Exception("semanticSchemaResp tag is empty");
} }
SemanticModel semanticModel = getSemanticModel(semanticSchemaResp); Ontology ontology = buildOntology(semanticSchemaResp);
// Map<String, List<Dimension>> dimensions = new HashMap<>(); // Map<String, List<Dimension>> dimensions = new HashMap<>();
Map<Long, List<TagResp>> tagMap = new HashMap<>(); Map<Long, List<TagResp>> tagMap = new HashMap<>();
for (TagResp tagResp : semanticSchemaResp.getTags()) { for (TagResp tagResp : semanticSchemaResp.getTags()) {
@@ -105,25 +103,23 @@ public class SemanticSchemaManager {
} }
tagMap.get(tagResp.getModelId()).add(tagResp); tagMap.get(tagResp.getModelId()).add(tagResp);
} }
if (Objects.nonNull(semanticModel.getDatasourceMap()) if (Objects.nonNull(ontology.getDataModelMap()) && !ontology.getDataModelMap().isEmpty()) {
&& !semanticModel.getDatasourceMap().isEmpty()) { for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
for (Map.Entry<String, DataSource> entry : semanticModel.getDatasourceMap()
.entrySet()) {
List<Dimension> modelDimensions = new ArrayList<>(); List<Dimension> modelDimensions = new ArrayList<>();
if (!semanticModel.getDimensionMap().containsKey(entry.getKey())) { if (!ontology.getDimensionMap().containsKey(entry.getKey())) {
semanticModel.getDimensionMap().put(entry.getKey(), modelDimensions); ontology.getDimensionMap().put(entry.getKey(), modelDimensions);
} else { } else {
modelDimensions = semanticModel.getDimensionMap().get(entry.getKey()); modelDimensions = ontology.getDimensionMap().get(entry.getKey());
} }
if (tagMap.containsKey(entry.getValue().getId())) { if (tagMap.containsKey(entry.getValue().getId())) {
for (TagResp tagResp : tagMap.get(entry.getValue().getId())) { for (TagResp tagResp : tagMap.get(entry.getValue().getId())) {
addTagModel(tagResp, modelDimensions, semanticModel.getMetrics()); addTagModel(tagResp, modelDimensions, ontology.getMetrics());
} }
} }
} }
} }
return semanticModel; return ontology;
} }
private void addTagModel(TagResp tagResp, List<Dimension> modelDimensions, private void addTagModel(TagResp tagResp, List<Dimension> modelDimensions,
@@ -178,30 +174,30 @@ public class SemanticSchemaManager {
return getDimension(t); return getDimension(t);
} }
public static DataSource getDatasource(final DataModelYamlTpl d) { public static DataModel getDataModel(final DataModelYamlTpl d) {
DataSource datasource = DataSource.builder().id(d.getId()).sourceId(d.getSourceId()) DataModel dataModel = DataModel.builder().id(d.getId()).modelId(d.getSourceId())
.type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName()) .type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName())
.tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers())) .tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers()))
.measures(getMeasureParams(d.getMeasures())) .measures(getMeasureParams(d.getMeasures()))
.dimensions(getDimensions(d.getDimensions())).build(); .dimensions(getDimensions(d.getDimensions())).build();
datasource.setAggTime(getDataSourceAggTime(datasource.getDimensions())); dataModel.setAggTime(getDataModelAggTime(dataModel.getDimensions()));
if (Objects.nonNull(d.getModelSourceTypeEnum())) { if (Objects.nonNull(d.getModelSourceTypeEnum())) {
datasource.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name())); dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name()));
} }
if (Objects.nonNull(d.getFields()) && !CollectionUtils.isEmpty(d.getFields())) { if (Objects.nonNull(d.getFields()) && !CollectionUtils.isEmpty(d.getFields())) {
Set<String> measures = datasource.getMeasures().stream().map(mm -> mm.getName()) Set<String> measures = dataModel.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet()); .collect(Collectors.toSet());
for (Field f : d.getFields()) { for (Field f : d.getFields()) {
if (!measures.contains(f.getFieldName())) { if (!measures.contains(f.getFieldName())) {
datasource.getMeasures().add(Measure.builder().expr(f.getFieldName()) dataModel.getMeasures().add(Measure.builder().expr(f.getFieldName())
.name(f.getFieldName()).agg("").build()); .name(f.getFieldName()).agg("").build());
} }
} }
} }
return datasource; return dataModel;
} }
private static String getDataSourceAggTime(List<Dimension> dimensions) { private static String getDataModelAggTime(List<Dimension> dimensions) {
Optional<Dimension> timeDimension = dimensions.stream() Optional<Dimension> timeDimension = dimensions.stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst(); .findFirst();
@@ -356,39 +352,39 @@ public class SemanticSchemaManager {
return joinRelations; return joinRelations;
} }
public static void update(SemanticSchema schema, List<Metric> metric) throws Exception { public static void update(S2CalciteSchema schema, List<Metric> metric) throws Exception {
if (schema != null) { if (schema != null) {
updateMetric(metric, schema.getMetrics()); updateMetric(metric, schema.getMetrics());
} }
} }
public static void update(SemanticSchema schema, DataSource datasourceYamlTpl) public static void update(S2CalciteSchema schema, DataModel datasourceYamlTpl)
throws Exception { throws Exception {
if (schema != null) { if (schema != null) {
String dataSourceName = datasourceYamlTpl.getName(); String dataSourceName = datasourceYamlTpl.getName();
Optional<Entry<String, DataSource>> datasourceYamlTplMap = Optional<Entry<String, DataModel>> datasourceYamlTplMap =
schema.getDatasource().entrySet().stream() schema.getDataModels().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst(); .filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst();
if (datasourceYamlTplMap.isPresent()) { if (datasourceYamlTplMap.isPresent()) {
datasourceYamlTplMap.get().setValue(datasourceYamlTpl); datasourceYamlTplMap.get().setValue(datasourceYamlTpl);
} else { } else {
schema.getDatasource().put(dataSourceName, datasourceYamlTpl); schema.getDataModels().put(dataSourceName, datasourceYamlTpl);
} }
} }
} }
public static void update(SemanticSchema schema, String datasourceBizName, public static void update(S2CalciteSchema schema, String datasourceBizName,
List<Dimension> dimensionYamlTpls) throws Exception { List<Dimension> dimensionYamlTpls) throws Exception {
if (schema != null) { if (schema != null) {
Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema
.getDimension().entrySet().stream() .getDimensions().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst(); .filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst();
if (datasourceYamlTplMap.isPresent()) { if (datasourceYamlTplMap.isPresent()) {
updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue()); updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue());
} else { } else {
List<Dimension> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
updateDimension(dimensionYamlTpls, dimensions); updateDimension(dimensionYamlTpls, dimensions);
schema.getDimension().put(datasourceBizName, dimensions); schema.getDimensions().put(datasourceBizName, dimensions);
} }
} }
} }

View File

@@ -126,7 +126,7 @@ public class DomainServiceImpl implements DomainService {
return domainWithAuth.stream().peek(domainResp -> domainResp.setHasEditPermission(true)) return domainWithAuth.stream().peek(domainResp -> domainResp.setHasEditPermission(true))
.collect(Collectors.toSet()); .collect(Collectors.toSet());
} }
if (authTypeEnum.equals(AuthType.VISIBLE)) { if (authTypeEnum.equals(AuthType.VIEWER)) {
domainWithAuth = domainResps.stream() domainWithAuth = domainResps.stream()
.filter(domainResp -> checkViewPermission(orgIds, user, domainResp)) .filter(domainResp -> checkViewPermission(orgIds, user, domainResp))
.collect(Collectors.toSet()); .collect(Collectors.toSet());

View File

@@ -428,7 +428,7 @@ public class ModelServiceImpl implements ModelService {
.filter(modelResp -> checkAdminPermission(orgIds, user, modelResp)) .filter(modelResp -> checkAdminPermission(orgIds, user, modelResp))
.collect(Collectors.toList()); .collect(Collectors.toList());
} }
if (authTypeEnum.equals(AuthType.VISIBLE)) { if (authTypeEnum.equals(AuthType.VIEWER)) {
modelWithAuth = modelResps.stream() modelWithAuth = modelResps.stream()
.filter(domainResp -> checkDataSetPermission(orgIds, user, domainResp)) .filter(domainResp -> checkDataSetPermission(orgIds, user, domainResp))
.collect(Collectors.toList()); .collect(Collectors.toList());

View File

@@ -32,7 +32,7 @@ public class MetricDrillDownChecker {
public void checkQuery(QueryStatement queryStatement) { public void checkQuery(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
String sql = queryStatement.getDataSetQueryParam().getSql(); String sql = queryStatement.getSql();
if (StringUtils.isBlank(sql)) { if (StringUtils.isBlank(sql)) {
return; return;
} }

View File

@@ -1,376 +0,0 @@
package com.tencent.supersonic.headless.server.utils;
import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Translates a logical S2SQL request ({@link QuerySqlReq}) issued against a data set into a
 * physical {@link QueryStatement}, using the semantic schema to resolve display names, correct
 * engine-specific function names, assemble metric tables, and expand derived metrics.
 *
 * <p>The {@code convert} pipeline mutates the incoming request's SQL in place through a fixed
 * sequence of rewrites; the ordering of those steps is significant and must not be changed.
 */
@Component
@Slf4j
public class QueryReqConverter {

    // Utilities for date-range extraction and the set of internal (system) columns.
    @Autowired
    private QueryStructUtils queryStructUtils;

    // Utilities for SQL generation: internal metric names, derived-metric expressions, WITH support.
    @Autowired
    private SqlGenerateUtils sqlGenerateUtils;

    /**
     * Converts an S2SQL request into a physical {@link QueryStatement}.
     *
     * <p>Pipeline (order matters — each step rewrites {@code querySQLReq}'s SQL in place):
     * <ol>
     *   <li>replace display names/aliases with bizNames,</li>
     *   <li>correct function names for the target database engine,</li>
     *   <li>rewrite the table name to the internal data-set table,</li>
     *   <li>strip underscores, fix ORDER BY items that alias aggregates,</li>
     *   <li>collect referenced metrics/dimensions into a {@link MetricTable},</li>
     *   <li>build the {@link DataSetQueryParam} (disabling WITH if unsupported by the engine),</li>
     *   <li>expand derived metrics into measure-level expressions,</li>
     *   <li>assemble the final {@link QueryStatement} with date range and query type.</li>
     * </ol>
     *
     * @param querySQLReq        the S2SQL request; its SQL is mutated by this call
     * @param semanticSchemaResp schema of the target data set (dimensions, metrics, models, database)
     * @return a populated statement, or an empty {@link QueryStatement} when the schema is null or
     *         no table name can be extracted from the SQL
     * @throws Exception propagated from SQL parsing / date-range extraction
     */
    public QueryStatement convert(QuerySqlReq querySQLReq, SemanticSchemaResp semanticSchemaResp)
            throws Exception {
        if (semanticSchemaResp == null) {
            return new QueryStatement();
        }
        // 1.convert name to bizName
        convertNameToBizName(querySQLReq, semanticSchemaResp);
        // 2.functionName corrector
        functionNameCorrector(querySQLReq, semanticSchemaResp);
        // 3.correct tableName
        correctTableName(querySQLReq);
        // 4.remove Underscores
        querySQLReq.setSql(SqlRemoveHelper.removeUnderscores(querySQLReq.getSql()));
        String tableName = SqlSelectHelper.getTableName(querySQLReq.getSql());
        if (StringUtils.isEmpty(tableName)) {
            // Without a resolvable table name there is nothing to query against.
            return new QueryStatement();
        }
        // correct ORDER BY items that reference an aggregate's alias
        String reqSql = querySQLReq.getSql();
        querySQLReq.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(querySQLReq.getSql()));
        log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, querySQLReq.getSql());
        // 5.build MetricTables from the fields actually referenced in the SELECT
        List<String> allFields = SqlSelectHelper.getAllSelectFields(querySQLReq.getSql());
        List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
        List<String> metrics =
                metricSchemas.stream().map(m -> m.getBizName()).collect(Collectors.toList());
        QueryStructReq queryStructReq = new QueryStructReq();
        MetricTable metricTable = new MetricTable();
        metricTable.setMetrics(metrics);
        Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
        metricTable.setDimensions(new ArrayList<>(dimensions));
        metricTable.setAlias(tableName.toLowerCase());
        // if metric empty, fill an internal default metric derived from the best-matching model
        if (CollectionUtils.isEmpty(metricTable.getMetrics())) {
            metricTable.setMetrics(new ArrayList<>());
            metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
                    getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
        } else {
            queryStructReq.setAggregators(metricTable.getMetrics().stream()
                    .map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN))
                    .collect(Collectors.toList()));
        }
        AggOption aggOption = getAggOption(querySQLReq, metricSchemas);
        metricTable.setAggOption(aggOption);
        List<MetricTable> tables = new ArrayList<>();
        tables.add(metricTable);
        // 6.build the data-set query param (ParseSqlReq equivalent)
        DataSetQueryParam result = new DataSetQueryParam();
        BeanUtils.copyProperties(querySQLReq, result);
        result.setTables(tables);
        // NOTE(review): unlike functionNameCorrector, this path does not null-check
        // database / database.getType() — confirm the schema always carries one here.
        DatabaseResp database = semanticSchemaResp.getDatabaseResp();
        if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()),
                database.getVersion())) {
            // Engine/version cannot run WITH clauses: fall back to inlined sub-queries.
            result.setSupportWith(false);
            result.setWithAlias(false);
        }
        // 7. expand derived metrics into measure-level expressions (may rewrite the SQL)
        generateDerivedMetric(semanticSchemaResp, aggOption, result);
        // 8.assemble the physical query statement
        queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(querySQLReq.getSql()));
        queryStructReq.setDataSetId(querySQLReq.getDataSetId());
        queryStructReq.setQueryType(getQueryType(aggOption));
        log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq);
        QueryParam queryParam = new QueryParam();
        convert(queryStructReq, queryParam);
        QueryStatement queryStatement = new QueryStatement();
        queryStatement.setQueryParam(queryParam);
        queryStatement.setDataSetQueryParam(result);
        queryStatement.setIsS2SQL(true);
        queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq));
        queryStatement.setDataSetId(querySQLReq.getDataSetId());
        queryStatement.setLimit(querySQLReq.getLimit());
        return queryStatement;
    }

    /**
     * Copies the structured request into a {@link QueryParam}.
     *
     * <p>The explicit setters after {@code copyProperties} re-assign orders/metrics/groups —
     * presumably because the getters apply normalization that a plain property copy would miss
     * (TODO confirm against QueryStructReq's getter implementations).
     */
    public void convert(QueryStructReq queryStructReq, QueryParam queryParam) {
        BeanUtils.copyProperties(queryStructReq, queryParam);
        queryParam.setOrders(queryStructReq.getOrders());
        queryParam.setMetrics(queryStructReq.getMetrics());
        queryParam.setGroups(queryStructReq.getGroups());
    }

    /**
     * Decides how the metric table should be aggregated, from the shape of the SQL and the
     * metrics' default aggregators.
     *
     * @param databaseReq   request whose SQL is inspected
     * @param metricSchemas metrics referenced by the SQL (used to detect missing defaultAgg)
     * @return DEFAULT for simple flat SQL; OUTER when the outer query itself aggregates
     *         (count/count_distinct, group by, sub-select/WITH) or some metric lacks a default
     *         aggregator; NATIVE when the request explicitly asks for inner-layer native mode
     */
    private AggOption getAggOption(QuerySqlReq databaseReq, List<MetricSchemaResp> metricSchemas) {
        String sql = databaseReq.getSql();
        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)
                && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) {
            log.debug("getAggOption simple sql set to DEFAULT");
            return AggOption.DEFAULT;
        }
        // if there is no group by in S2SQL, set MetricTable's aggOption to "NATIVE"
        // if there is count() in S2SQL, set MetricTable's aggOption to "NATIVE"
        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql)
                || SqlSelectFunctionHelper.hasFunction(sql, "count")
                || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
            return AggOption.OUTER;
        }
        if (databaseReq.isInnerLayerNative()) {
            return AggOption.NATIVE;
        }
        if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql)
                || SqlSelectHelper.hasGroupBy(sql)) {
            return AggOption.OUTER;
        }
        // Metrics without a default aggregator cannot be pre-aggregated safely.
        long defaultAggNullCnt = metricSchemas.stream().filter(
                m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg()))
                .count();
        if (defaultAggNullCnt > 0) {
            log.debug("getAggOption find null defaultAgg metric set to NATIVE");
            return AggOption.OUTER;
        }
        return AggOption.DEFAULT;
    }

    /**
     * Rewrites the SQL so that every display name / alias becomes the schema's bizName.
     * Position-based replacement runs first, then field-name replacement.
     */
    private void convertNameToBizName(QuerySqlReq querySqlReq,
            SemanticSchemaResp semanticSchemaResp) {
        Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
        String sql = querySqlReq.getSql();
        log.debug("dataSetId:{},convert name to bizName before:{}", querySqlReq.getDataSetId(),
                sql);
        sql = SqlReplaceHelper.replaceSqlByPositions(sql);
        log.debug("replaceSqlByPositions:{}", sql);
        String replaceFields = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
        log.debug("dataSetId:{},convert name to bizName after:{}", querySqlReq.getDataSetId(),
                replaceFields);
        querySqlReq.setSql(replaceFields);
    }

    /**
     * Returns the bizNames of the dimensions referenced by {@code allFields}
     * (case-insensitive match; internal system columns count as dimensions too).
     */
    private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
            List<String> allFields) {
        Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
                .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
                        SchemaItem::getBizName, (k1, k2) -> k1));
        Map<String, String> internalLowerToNameMap = QueryStructUtils.internalCols.stream()
                .collect(Collectors.toMap(String::toLowerCase, a -> a));
        dimensionLowerToNameMap.putAll(internalLowerToNameMap);
        return allFields.stream()
                .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
                .collect(Collectors.toSet());
    }

    /**
     * Returns the metric schemas referenced by {@code allFields} (case-insensitive match on
     * bizName).
     */
    private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
            List<String> allFields) {
        Map<String, MetricSchemaResp> metricLowerToNameMap =
                semanticSchemaResp.getMetrics().stream().collect(Collectors
                        .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
        return allFields.stream()
                .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
                .collect(Collectors.toList());
    }

    /**
     * Rewrites function names in the SQL to the dialect of the target database engine.
     * No-op when the database or its type is unknown, or no adaptor exists for the engine.
     */
    private void functionNameCorrector(QuerySqlReq databaseReq,
            SemanticSchemaResp semanticSchemaResp) {
        DatabaseResp database = semanticSchemaResp.getDatabaseResp();
        if (Objects.isNull(database) || Objects.isNull(database.getType())) {
            return;
        }
        String type = database.getType();
        DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
        if (Objects.nonNull(engineAdaptor)) {
            String functionNameCorrector =
                    engineAdaptor.functionNameCorrector(databaseReq.getSql());
            databaseReq.setSql(functionNameCorrector);
        }
    }

    /**
     * Builds the lookup map from display name / alias to bizName for every dimension and metric,
     * plus the time-dimension name mappings. On key collision the first entry wins; metric
     * entries overwrite dimension entries of the same name (they are merged in last).
     */
    protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
        // support fieldName and field alias to bizName
        Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap());
        dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap());
        dimensionResults.putAll(metricResults);
        return dimensionResults;
    }

    /**
     * Expands one schema item into (name -> bizName) pairs, one per alias plus the display name.
     *
     * @param aliasStr raw alias string as stored on the schema item (parsed by SchemaItem)
     */
    private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
            String bizName) {
        Set<Pair<String, String>> elements = new HashSet<>();
        elements.add(Pair.of(name, bizName));
        if (StringUtils.isNotBlank(aliasStr)) {
            List<String> aliasList = SchemaItem.getAliasList(aliasStr);
            for (String alias : aliasList) {
                elements.add(Pair.of(alias, bizName));
            }
        }
        return elements.stream();
    }

    /**
     * Replaces the table referenced by the SQL with the internal data-set table name
     * ({@code TABLE_PREFIX + dataSetId}).
     */
    public void correctTableName(QuerySqlReq querySqlReq) {
        String sql = querySqlReq.getSql();
        sql = SqlReplaceHelper.replaceTable(sql,
                Constants.TABLE_PREFIX + querySqlReq.getDataSetId());
        log.debug("correctTableName after:{}", sql);
        querySqlReq.setSql(sql);
    }

    /** Maps the agg option to a query type: aggregating → AGGREGATE, otherwise DETAIL. */
    private QueryType getQueryType(AggOption aggOption) {
        boolean isAgg = AggOption.isAgg(aggOption);
        QueryType queryType = QueryType.DETAIL;
        if (isAgg) {
            queryType = QueryType.AGGREGATE;
        }
        return queryType;
    }

    /**
     * For each metric table, replaces derived metrics in the SQL with their measure-level
     * expressions. When a replacement happens, the table is forced to NATIVE aggregation and its
     * metric list becomes the underlying measures (or an internal default metric when no measure
     * remains). The rewritten SQL is written back into {@code viewQueryParam}.
     */
    private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption,
            DataSetQueryParam viewQueryParam) {
        String sql = viewQueryParam.getSql();
        for (MetricTable metricTable : viewQueryParam.getTables()) {
            Set<String> measures = new HashSet<>();
            Map<String, String> replaces = generateDerivedMetric(semanticSchemaResp, aggOption,
                    metricTable.getMetrics(), metricTable.getDimensions(), measures);
            if (!CollectionUtils.isEmpty(replaces)) {
                // metricTable sql: use measure expressions in place of derived metrics
                sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
                metricTable.setAggOption(AggOption.NATIVE);
                // metricTable: use measures in place of metrics
                if (!CollectionUtils.isEmpty(measures)) {
                    metricTable.setMetrics(new ArrayList<>(measures));
                } else {
                    // empty measure, fill default
                    metricTable.setMetrics(new ArrayList<>());
                    metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
                            getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
                }
            }
        }
        viewQueryParam.setSql(sql);
    }

    /**
     * Computes the replacement expressions for derived metrics.
     *
     * @param metrics    bizNames of the metrics referenced by the query
     * @param dimensions bizNames of the referenced dimensions; MUTATED — dimensions required by
     *                   derived-metric expressions are appended
     * @param measures   OUT parameter — collects bizNames of non-derived metrics and measures
     *                   pulled in by derived-metric expansion
     * @return map from derived-metric bizName to its expanded measure expression; empty when no
     *         referenced metric is derived
     */
    private Map<String, String> generateDerivedMetric(SemanticSchemaResp semanticSchemaResp,
            AggOption aggOption, List<String> metrics, List<String> dimensions,
            Set<String> measures) {
        Map<String, String> result = new HashMap<>();
        List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
        List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
        // Check if any metric is derived
        boolean hasDerivedMetrics =
                metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
                        .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
        if (!hasDerivedMetrics) {
            return result;
        }
        log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
        // Gather every field and measure across all models so expansion can resolve references.
        Set<String> allFields = new HashSet<>();
        Map<String, Measure> allMeasures = new HashMap<>();
        semanticSchemaResp.getModelResps().forEach(modelResp -> {
            allFields.addAll(modelResp.getFieldList());
            if (modelResp.getModelDetail().getMeasures() != null) {
                modelResp.getModelDetail().getMeasures()
                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
            }
        });
        Set<String> derivedDimensions = new HashSet<>();
        Set<String> derivedMetrics = new HashSet<>();
        // visitedMetrics memoizes expansions so shared sub-metrics are only expanded once.
        Map<String, String> visitedMetrics = new HashMap<>();
        for (MetricResp metricResp : metricResps) {
            if (metrics.contains(metricResp.getBizName())) {
                boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
                        metricResp.getMetricDefineByMeasureParams());
                if (isDerived) {
                    String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
                            allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
                            metricResp.getMetricDefineType(), aggOption, visitedMetrics,
                            derivedMetrics, derivedDimensions);
                    result.put(metricResp.getBizName(), expr);
                    log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
                } else {
                    measures.add(metricResp.getBizName());
                }
            }
        }
        measures.addAll(derivedMetrics);
        derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
                .forEach(dimensions::add);
        return result;
    }

    /**
     * Picks the model whose dimensions best cover the given dimension list (most matches wins);
     * with no dimensions, falls back to the first model.
     *
     * <p>NOTE(review): assumes {@code getModelResps()} is non-empty — an empty list would throw
     * IndexOutOfBoundsException on the fallback path; confirm callers guarantee this.
     */
    private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List<String> dimensions) {
        if (!CollectionUtils.isEmpty(dimensions)) {
            Map<String, Long> modelMatchCnt = new HashMap<>();
            for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
                modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions()
                        .stream().filter(d -> dimensions.contains(d.getBizName())).count());
            }
            return modelMatchCnt.entrySet().stream()
                    .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
                    .map(m -> m.getKey()).findFirst().orElse("");
        }
        return semanticSchemaResp.getModelResps().get(0).getBizName();
    }
}

View File

@@ -140,15 +140,15 @@ public class QueryUtils {
return null; return null;
} }
public QueryStatement sqlParserUnion(QueryMultiStructReq queryMultiStructCmd, public QueryStatement unionAll(QueryMultiStructReq queryMultiStructCmd,
List<QueryStatement> sqlParsers) { List<QueryStatement> queryStatements) {
QueryStatement sqlParser = new QueryStatement(); QueryStatement sqlParser = new QueryStatement();
StringBuilder unionSqlBuilder = new StringBuilder(); StringBuilder unionSqlBuilder = new StringBuilder();
for (int i = 0; i < sqlParsers.size(); i++) { for (int i = 0; i < queryStatements.size(); i++) {
String selectStr = SqlGenerateUtils String selectStr = SqlGenerateUtils
.getUnionSelect(queryMultiStructCmd.getQueryStructReqs().get(i)); .getUnionSelect(queryMultiStructCmd.getQueryStructReqs().get(i));
unionSqlBuilder.append(String.format("select %s from ( %s ) sub_sql_%s", selectStr, unionSqlBuilder.append(String.format("select %s from ( %s ) sub_sql_%s", selectStr,
sqlParsers.get(i).getSql(), i)); queryStatements.get(i).getSql(), i));
unionSqlBuilder.append(UNIONALL); unionSqlBuilder.append(UNIONALL);
} }
String unionSql = unionSqlBuilder.substring(0, String unionSql = unionSqlBuilder.substring(0,

View File

@@ -6,8 +6,8 @@ import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp; import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
@@ -20,16 +20,12 @@ import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
@Slf4j @Slf4j
class HeadlessParserServiceTest { class HeadlessParserServiceTest {
private static Map<String, SemanticSchema> headlessSchemaMap = new HashMap<>(); public static SqlParserResp parser(S2CalciteSchema semanticSchema,
public static SqlParserResp parser(SemanticSchema semanticSchema,
MetricQueryParam metricQueryParam, boolean isAgg) { MetricQueryParam metricQueryParam, boolean isAgg) {
SqlParserResp sqlParser = new SqlParserResp(); SqlParserResp sqlParser = new SqlParserResp();
try { try {
@@ -37,14 +33,13 @@ class HeadlessParserServiceTest {
sqlParser.setErrMsg("headlessSchema not found"); sqlParser.setErrMsg("headlessSchema not found");
return sqlParser; return sqlParser;
} }
AggPlanner aggBuilder = new AggPlanner(semanticSchema); SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
QueryStatement queryStatement = new QueryStatement(); QueryStatement queryStatement = new QueryStatement();
queryStatement.setMetricQueryParam(metricQueryParam); queryStatement.setMetricQueryParam(metricQueryParam);
aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg)); aggBuilder.build(queryStatement, AggOption.getAggregation(!isAgg));
EngineType engineType = EngineType EngineType engineType =
.fromString(semanticSchema.getSemanticModel().getDatabase().getType()); EngineType.fromString(semanticSchema.getOntology().getDatabase().getType());
sqlParser.setSql(aggBuilder.getSql(engineType)); sqlParser.setSql(aggBuilder.getSql(engineType));
sqlParser.setSourceId(aggBuilder.getSourceId());
} catch (Exception e) { } catch (Exception e) {
sqlParser.setErrMsg(e.getMessage()); sqlParser.setErrMsg(e.getMessage());
log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e); log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e);
@@ -122,10 +117,10 @@ class HeadlessParserServiceTest {
identify.setType("primary"); identify.setType("primary");
identifies.add(identify); identifies.add(identify);
datasource.setIdentifiers(identifies); datasource.setIdentifiers(identifies);
SemanticSchema semanticSchema = SemanticSchema.newBuilder("1").build(); S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build();
SemanticSchemaManager.update(semanticSchema, SemanticSchemaManager.update(semanticSchema,
SemanticSchemaManager.getDatasource(datasource)); SemanticSchemaManager.getDataModel(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl(); DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setExpr("page"); dimension1.setExpr("page");
@@ -192,7 +187,7 @@ class HeadlessParserServiceTest {
System.out.println(parser(semanticSchema, metricCommand2, true)); System.out.println(parser(semanticSchema, metricCommand2, true));
} }
private static void addDepartment(SemanticSchema semanticSchema) { private static void addDepartment(S2CalciteSchema semanticSchema) {
DataModelYamlTpl datasource = new DataModelYamlTpl(); DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setName("user_department"); datasource.setName("user_department");
datasource.setSourceId(1L); datasource.setSourceId(1L);
@@ -238,8 +233,8 @@ class HeadlessParserServiceTest {
identifies.add(identify); identifies.add(identify);
datasource.setIdentifiers(identifies); datasource.setIdentifiers(identifies);
semanticSchema.getDatasource().put("user_department", semanticSchema.getDataModels().put("user_department",
SemanticSchemaManager.getDatasource(datasource)); SemanticSchemaManager.getDataModel(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl(); DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setExpr("department"); dimension1.setExpr("department");
@@ -248,7 +243,7 @@ class HeadlessParserServiceTest {
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>(); List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
dimensionYamlTpls.add(dimension1); dimensionYamlTpls.add(dimension1);
semanticSchema.getDimension().put("user_department", semanticSchema.getDimensions().put("user_department",
SemanticSchemaManager.getDimensions(dimensionYamlTpls)); SemanticSchemaManager.getDimensions(dimensionYamlTpls));
} }
} }

View File

@@ -167,6 +167,8 @@ public class S2SingerDemo extends S2BaseDemo {
Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT)); Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT));
chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId())); chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId()));
agent.setChatAppConfig(chatAppConfig); agent.setChatAppConfig(chatAppConfig);
agent.setAdmins(Lists.newArrayList("alice"));
agent.setViewers(Lists.newArrayList("tom", "jack"));
agentService.createAgent(agent, defaultUser); agentService.createAgent(agent, defaultUser);
} }
} }

View File

@@ -40,6 +40,8 @@ public class S2SmallTalkDemo extends S2BaseDemo {
chatAppConfig.get(PlainTextExecutor.APP_KEY).setEnable(true); chatAppConfig.get(PlainTextExecutor.APP_KEY).setEnable(true);
chatAppConfig.get(OnePassSCSqlGenStrategy.APP_KEY).setEnable(false); chatAppConfig.get(OnePassSCSqlGenStrategy.APP_KEY).setEnable(false);
agent.setChatAppConfig(chatAppConfig); agent.setChatAppConfig(chatAppConfig);
agent.setAdmins(Lists.newArrayList("jack"));
agent.setViewers(Lists.newArrayList("alice", "tom"));
agentService.createAgent(agent, defaultUser); agentService.createAgent(agent, defaultUser);
} }

View File

@@ -134,7 +134,7 @@ public class S2VisitsDemo extends S2BaseDemo {
private void addSampleChats(Integer agentId) { private void addSampleChats(Integer agentId) {
Long chatId = chatManageService.addChat(defaultUser, "样例对话1", agentId); Long chatId = chatManageService.addChat(defaultUser, "样例对话1", agentId);
submitText(chatId.intValue(), agentId, "超音数 访问次数"); submitText(chatId.intValue(), agentId, "访问过超音数的部门有哪些");
submitText(chatId.intValue(), agentId, "按部门统计近7天访问次数"); submitText(chatId.intValue(), agentId, "按部门统计近7天访问次数");
submitText(chatId.intValue(), agentId, "alice 停留时长"); submitText(chatId.intValue(), agentId, "alice 停留时长");
} }
@@ -162,6 +162,8 @@ public class S2VisitsDemo extends S2BaseDemo {
Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT)); Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT));
chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId())); chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId()));
agent.setChatAppConfig(chatAppConfig); agent.setChatAppConfig(chatAppConfig);
agent.setAdmins(Lists.newArrayList("tom"));
agent.setViewers(Lists.newArrayList("alice", "jack"));
Agent agentCreated = agentService.createAgent(agent, defaultUser); Agent agentCreated = agentService.createAgent(agent, defaultUser);
return agentCreated.getId(); return agentCreated.getId();
} }
@@ -444,7 +446,7 @@ public class S2VisitsDemo extends S2BaseDemo {
termReq1.setDescription("用户为tom和lucy"); termReq1.setDescription("用户为tom和lucy");
termReq1.setAlias(Lists.newArrayList("VIP用户")); termReq1.setAlias(Lists.newArrayList("VIP用户"));
termReq1.setDomainId(s2Domain.getId()); termReq1.setDomainId(s2Domain.getId());
termService.saveOrUpdate(termReq, defaultUser); termService.saveOrUpdate(termReq1, defaultUser);
} }
private void addAuthGroup_1(ModelResp stayTimeModel) { private void addAuthGroup_1(ModelResp stayTimeModel) {

View File

@@ -393,4 +393,8 @@ ALTER TABLE s2_agent DROP COLUMN `multi_turn_config`;
ALTER TABLE s2_agent DROP COLUMN `enable_memory_review`; ALTER TABLE s2_agent DROP COLUMN `enable_memory_review`;
--20241012 --20241012
alter table s2_agent add column `enable_feedback` tinyint DEFAULT 1; alter table s2_agent add column `enable_feedback` tinyint DEFAULT 1;
--20241116
alter table s2_agent add column `admin` varchar(1000);
alter table s2_agent add column `viewer` varchar(1000);

View File

@@ -398,6 +398,8 @@ CREATE TABLE IF NOT EXISTS s2_agent
updated_at TIMESTAMP null, updated_at TIMESTAMP null,
enable_search int null, enable_search int null,
enable_feedback int null, enable_feedback int null,
admin varchar(1000),
viewer varchar(1000),
PRIMARY KEY (`id`) PRIMARY KEY (`id`)
); COMMENT ON TABLE s2_agent IS 'agent information table'; ); COMMENT ON TABLE s2_agent IS 'agent information table';

View File

@@ -50,8 +50,7 @@ public class SchemaAuthTest extends BaseTest {
@Test @Test
public void test_getVisibleModelList_alice() { public void test_getVisibleModelList_alice() {
User user = DataUtils.getUserAlice(); User user = DataUtils.getUserAlice();
List<ModelResp> modelResps = List<ModelResp> modelResps = modelService.getModelListWithAuth(user, null, AuthType.VIEWER);
modelService.getModelListWithAuth(user, null, AuthType.VISIBLE);
List<String> expectedModelBizNames = Lists.newArrayList("user_department", "singer"); List<String> expectedModelBizNames = Lists.newArrayList("user_department", "singer");
Assertions.assertEquals(expectedModelBizNames, Assertions.assertEquals(expectedModelBizNames,
modelResps.stream().map(ModelResp::getBizName).collect(Collectors.toList())); modelResps.stream().map(ModelResp::getBizName).collect(Collectors.toList()));

View File

@@ -398,6 +398,8 @@ CREATE TABLE IF NOT EXISTS s2_agent
updated_at TIMESTAMP null, updated_at TIMESTAMP null,
enable_search int null, enable_search int null,
enable_feedback int null, enable_feedback int null,
admin varchar(1000),
viewer varchar(1000),
PRIMARY KEY (`id`) PRIMARY KEY (`id`)
); COMMENT ON TABLE s2_agent IS 'agent information table'; ); COMMENT ON TABLE s2_agent IS 'agent information table';

View File

@@ -3,6 +3,9 @@ import { NumericUnit } from '../common/constants';
import { isString } from 'lodash'; import { isString } from 'lodash';
export function formatByDecimalPlaces(value: number | string, decimalPlaces: number) { export function formatByDecimalPlaces(value: number | string, decimalPlaces: number) {
if (value === null || value === undefined || value === '') {
return 0;
}
if (isNaN(+value) || decimalPlaces < 0 || decimalPlaces > 100) { if (isNaN(+value) || decimalPlaces < 0 || decimalPlaces > 100) {
return value; return value;
} }
@@ -17,6 +20,9 @@ export function formatByDecimalPlaces(value: number | string, decimalPlaces: num
} }
export function formatByThousandSeperator(value: number | string) { export function formatByThousandSeperator(value: number | string) {
if (value === null || value === undefined || value === '') {
return 0;
}
if (isNaN(+value)) { if (isNaN(+value)) {
return value; return value;
} }

View File

@@ -7,3 +7,12 @@
.userText { .userText {
margin-left: 10px; margin-left: 10px;
} }
.selectPerson {
:global {
.ant-select-selection-item {
color: rgba(0, 0, 0, 0.88)!important;
background-color: rgba(0, 0, 0, 0.06)!important;
}
}
}

View File

@@ -37,6 +37,7 @@ const SelectTMEPerson: FC<Props> = ({ placeholder, value, isMultiple = true, onC
mode={isMultiple ? 'multiple' : undefined} mode={isMultiple ? 'multiple' : undefined}
allowClear allowClear
showSearch showSearch
className={styles.selectPerson}
onChange={onChange} onChange={onChange}
> >
{userList.map((item) => { {userList.map((item) => {

View File

@@ -8,6 +8,8 @@ import { uuid, jsonParse } from '@/utils/utils';
import ToolsSection from './ToolsSection'; import ToolsSection from './ToolsSection';
import globalStyles from '@/global.less'; import globalStyles from '@/global.less';
import { QuestionCircleOutlined } from '@ant-design/icons'; import { QuestionCircleOutlined } from '@ant-design/icons';
import SelectTMEPerson from '@/components/SelectTMEPerson';
import FormItemTitle from '@/components/FormHelper/FormItemTitle';
import { getLlmModelTypeList, getLlmModelAppList, getLlmList } from '../../services/system'; import { getLlmModelTypeList, getLlmModelAppList, getLlmList } from '../../services/system';
import MemorySection from './MemorySection'; import MemorySection from './MemorySection';
@@ -223,7 +225,16 @@ const AgentForm: React.FC<Props> = ({ editAgent, onSaveAgent, onCreateToolBtnCli
> >
<Switch /> <Switch />
</FormItem> </FormItem>
<FormItem
name="admins"
label="管理员"
// rules={[{ required: true, message: '请设定数据库连接管理者' }]}
>
<SelectTMEPerson placeholder="请邀请团队成员" />
</FormItem>
<FormItem tooltip="选择用户后,该助理只对所选用户可见" name="viewers" label="使用者">
<SelectTMEPerson placeholder="请邀请团队成员" />
</FormItem>
<FormItem name="examples" label="示例问题"> <FormItem name="examples" label="示例问题">
<div className={styles.paramsSection}> <div className={styles.paramsSection}>
{examples.map((example) => { {examples.map((example) => {

View File

@@ -207,26 +207,26 @@ const ModelFieldForm: React.FC<Props> = ({
// width: 200, // width: 200,
render: (_: any, record: FieldItem) => { render: (_: any, record: FieldItem) => {
const { type } = record; const { type } = record;
if (type === EnumDataSourceType.PRIMARY) { // if (type === EnumDataSourceType.PRIMARY) {
return ( // return (
<Space> // <Space>
<Select // <Select
style={{ minWidth: 150 }} // style={{ minWidth: 150 }}
value={tagObjectId ? tagObjectId : undefined} // value={tagObjectId ? tagObjectId : undefined}
placeholder="请选择所属对象" // placeholder="请选择所属对象"
onChange={(value) => { // onChange={(value) => {
onTagObjectChange?.(value); // onTagObjectChange?.(value);
}} // }}
options={tagObjectList.map((item: ISemantic.ITagObjectItem) => { // options={tagObjectList.map((item: ISemantic.ITagObjectItem) => {
return { // return {
label: item.name, // label: item.name,
value: item.id, // value: item.id,
}; // };
})} // })}
/> // />
</Space> // </Space>
); // );
} // }
if (type === EnumDataSourceType.MEASURES) { if (type === EnumDataSourceType.MEASURES) {
return ( return (
<Select <Select