From 36d221ab74b5b27cb0be568ca99a1839295c793a Mon Sep 17 00:00:00 2001 From: lxwcodemonkey Date: Sat, 16 Nov 2024 21:44:50 +0800 Subject: [PATCH 01/88] [improvement][Chat] Support agent permission management #1143 --- .../supersonic/chat/server/agent/Agent.java | 10 +++++++ .../persistence/dataobject/AgentDO.java | 4 +++ .../chat/server/rest/AgentController.java | 9 +++++-- .../chat/server/service/AgentService.java | 2 ++ .../server/service/impl/AgentServiceImpl.java | 26 +++++++++++++++++++ .../common/pojo/enums/AuthType.java | 2 +- .../server/aspect/S2DataPermissionAspect.java | 5 ++-- .../service/impl/DomainServiceImpl.java | 2 +- .../server/service/impl/ModelServiceImpl.java | 2 +- .../tencent/supersonic/demo/S2SingerDemo.java | 2 ++ .../supersonic/demo/S2SmallTalkDemo.java | 2 ++ .../tencent/supersonic/demo/S2VisitsDemo.java | 2 ++ .../resources/config.update/sql-update.sql | 6 ++++- .../src/main/resources/db/schema-h2.sql | 2 ++ .../supersonic/headless/SchemaAuthTest.java | 2 +- .../src/test/resources/db/schema-h2.sql | 2 ++ 16 files changed, 70 insertions(+), 10 deletions(-) diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/agent/Agent.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/agent/Agent.java index 0374f10a3..36da61d57 100644 --- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/agent/Agent.java +++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/agent/Agent.java @@ -1,9 +1,11 @@ package com.tencent.supersonic.chat.server.agent; import com.alibaba.fastjson.JSONObject; +import com.google.common.collect.Lists; import com.tencent.supersonic.chat.server.memory.MemoryReviewTask; import com.tencent.supersonic.common.pojo.ChatApp; import com.tencent.supersonic.common.pojo.RecordInfo; +import com.tencent.supersonic.common.pojo.User; import lombok.Data; import org.springframework.util.CollectionUtils; @@ -12,6 +14,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; @Data @@ -33,6 +36,8 @@ public class Agent extends RecordInfo { private String toolConfig; private Map chatAppConfig = Collections.emptyMap(); private VisualConfig visualConfig; + private List admins = Lists.newArrayList(); + private List viewers = Lists.newArrayList(); public List getTools(AgentToolType type) { Map map = JSONObject.parseObject(toolConfig, Map.class); @@ -105,4 +110,9 @@ public class Agent extends RecordInfo { .filter(dataSetIds -> !CollectionUtils.isEmpty(dataSetIds)) .flatMap(Collection::stream).collect(Collectors.toSet()); } + + public boolean contains(User user, Function> list) { + return list.apply(this).contains(user.getName()); + } + } diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/persistence/dataobject/AgentDO.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/persistence/dataobject/AgentDO.java index 58645c621..a71596e82 100644 --- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/persistence/dataobject/AgentDO.java +++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/persistence/dataobject/AgentDO.java @@ -40,4 +40,8 @@ public class AgentDO { private String chatModelConfig; private String visualConfig; + + private String admin; + + private String viewer; } diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/AgentController.java 
b/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/AgentController.java index 0cb4ddee0..ff3e01b18 100644 --- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/AgentController.java +++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/AgentController.java @@ -8,6 +8,7 @@ import com.tencent.supersonic.chat.server.agent.Agent; import com.tencent.supersonic.chat.server.agent.AgentToolType; import com.tencent.supersonic.chat.server.service.AgentService; import com.tencent.supersonic.common.pojo.User; +import com.tencent.supersonic.common.pojo.enums.AuthType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.PathVariable; @@ -15,6 +16,7 @@ import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import java.util.List; @@ -48,8 +50,11 @@ public class AgentController { } @RequestMapping("/getAgentList") - public List getAgentList() { - return agentService.getAgents(); + public List getAgentList( + @RequestParam(value = "authType", required = false) AuthType authType, + HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) { + User user = UserHolder.findUser(httpServletRequest, httpServletResponse); + return agentService.getAgents(user, authType); } @RequestMapping("/getToolTypes") diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/AgentService.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/AgentService.java index 147ff2615..d5d24fcb7 100644 --- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/AgentService.java +++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/AgentService.java @@ -2,10 +2,12 @@ package com.tencent.supersonic.chat.server.service; import com.tencent.supersonic.chat.server.agent.Agent; import com.tencent.supersonic.common.pojo.User; +import com.tencent.supersonic.common.pojo.enums.AuthType; import java.util.List; public interface AgentService { + List getAgents(User user, AuthType authType); List getAgents(); diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/AgentServiceImpl.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/AgentServiceImpl.java index c048c59fc..591d3a21b 100644 --- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/AgentServiceImpl.java +++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/AgentServiceImpl.java @@ -14,6 +14,7 @@ import com.tencent.supersonic.chat.server.service.MemoryService; import com.tencent.supersonic.common.config.ChatModel; import com.tencent.supersonic.common.pojo.ChatApp; import com.tencent.supersonic.common.pojo.User; +import com.tencent.supersonic.common.pojo.enums.AuthType; import com.tencent.supersonic.common.service.ChatModelService; import com.tencent.supersonic.common.util.JsonUtil; import lombok.extern.slf4j.Slf4j; @@ -43,6 +44,27 @@ public class AgentServiceImpl extends ServiceImpl implem private ExecutorService executorService = Executors.newFixedThreadPool(1); + @Override + public List getAgents(User user, 
AuthType authType) { + return getAgentDOList().stream().map(this::convert) + .filter(agent -> filterByAuth(agent, user, authType)).collect(Collectors.toList()); + } + + private boolean filterByAuth(Agent agent, User user, AuthType authType) { + if (user.isSuperAdmin() || user.getName().equals(agent.getCreatedBy())) { + return true; + } + authType = authType == null ? AuthType.VIEWER : authType; + switch (authType) { + case ADMIN: + return agent.contains(user, Agent::getAdmins); + case VIEWER: + default: + return agent.contains(user, Agent::getAdmins) + || agent.contains(user, Agent::getViewers); + } + } + @Override public List getAgents() { return getAgentDOList().stream().map(this::convert).collect(Collectors.toList()); @@ -135,6 +157,8 @@ public class AgentServiceImpl extends ServiceImpl implem c.setChatModelConfig(chatModelService.getChatModel(c.getChatModelId()).getConfig()); } }); + agent.setAdmins(JsonUtil.toList(agentDO.getAdmin(), String.class)); + agent.setViewers(JsonUtil.toList(agentDO.getViewer(), String.class)); return agent; } @@ -145,6 +169,8 @@ public class AgentServiceImpl extends ServiceImpl implem agentDO.setExamples(JsonUtil.toString(agent.getExamples())); agentDO.setChatModelConfig(JsonUtil.toString(agent.getChatAppConfig())); agentDO.setVisualConfig(JsonUtil.toString(agent.getVisualConfig())); + agentDO.setAdmin(JsonUtil.toString(agent.getAdmins())); + agentDO.setViewer(JsonUtil.toString(agent.getViewers())); if (agentDO.getStatus() == null) { agentDO.setStatus(1); } diff --git a/common/src/main/java/com/tencent/supersonic/common/pojo/enums/AuthType.java b/common/src/main/java/com/tencent/supersonic/common/pojo/enums/AuthType.java index 0c505a120..3df7e526b 100644 --- a/common/src/main/java/com/tencent/supersonic/common/pojo/enums/AuthType.java +++ b/common/src/main/java/com/tencent/supersonic/common/pojo/enums/AuthType.java @@ -1,5 +1,5 @@ package com.tencent.supersonic.common.pojo.enums; public enum AuthType { - VISIBLE, ADMIN + VIEWER, ADMIN } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/aspect/S2DataPermissionAspect.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/aspect/S2DataPermissionAspect.java index fdd2750c0..2cb623acc 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/aspect/S2DataPermissionAspect.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/aspect/S2DataPermissionAspect.java @@ -260,9 +260,8 @@ public class S2DataPermissionAspect { } public void checkModelVisible(User user, Set modelIds) { - List modelListVisible = - modelService.getModelListWithAuth(user, null, AuthType.VISIBLE).stream() - .map(ModelResp::getId).collect(Collectors.toList()); + List modelListVisible = modelService.getModelListWithAuth(user, null, AuthType.VIEWER) + .stream().map(ModelResp::getId).collect(Collectors.toList()); List modelIdCopied = new ArrayList<>(modelIds); modelIdCopied.removeAll(modelListVisible); if (!CollectionUtils.isEmpty(modelIdCopied)) { diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DomainServiceImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DomainServiceImpl.java index fd9f69d43..be75ac482 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DomainServiceImpl.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DomainServiceImpl.java @@ -126,7 +126,7 @@ public 
class DomainServiceImpl implements DomainService { return domainWithAuth.stream().peek(domainResp -> domainResp.setHasEditPermission(true)) .collect(Collectors.toSet()); } - if (authTypeEnum.equals(AuthType.VISIBLE)) { + if (authTypeEnum.equals(AuthType.VIEWER)) { domainWithAuth = domainResps.stream() .filter(domainResp -> checkViewPermission(orgIds, user, domainResp)) .collect(Collectors.toSet()); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/ModelServiceImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/ModelServiceImpl.java index f59d7e64e..e1c64a445 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/ModelServiceImpl.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/ModelServiceImpl.java @@ -428,7 +428,7 @@ public class ModelServiceImpl implements ModelService { .filter(modelResp -> checkAdminPermission(orgIds, user, modelResp)) .collect(Collectors.toList()); } - if (authTypeEnum.equals(AuthType.VISIBLE)) { + if (authTypeEnum.equals(AuthType.VIEWER)) { modelWithAuth = modelResps.stream() .filter(domainResp -> checkDataSetPermission(orgIds, user, domainResp)) .collect(Collectors.toList()); diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SingerDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SingerDemo.java index 3659659b7..1b40be2d5 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SingerDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SingerDemo.java @@ -167,6 +167,8 @@ public class S2SingerDemo extends S2BaseDemo { Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT)); chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId())); agent.setChatAppConfig(chatAppConfig); + agent.setAdmins(Lists.newArrayList("alice")); + agent.setViewers(Lists.newArrayList("tom", "jack")); agentService.createAgent(agent, defaultUser); } } diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SmallTalkDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SmallTalkDemo.java index 943ed9c3f..7964d0bf8 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SmallTalkDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2SmallTalkDemo.java @@ -40,6 +40,8 @@ public class S2SmallTalkDemo extends S2BaseDemo { chatAppConfig.get(PlainTextExecutor.APP_KEY).setEnable(true); chatAppConfig.get(OnePassSCSqlGenStrategy.APP_KEY).setEnable(false); agent.setChatAppConfig(chatAppConfig); + agent.setAdmins(Lists.newArrayList("jack")); + agent.setViewers(Lists.newArrayList("alice", "tom")); agentService.createAgent(agent, defaultUser); } diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java index 29748cb4c..87b282372 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java @@ -162,6 +162,8 @@ public class S2VisitsDemo extends S2BaseDemo { Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT)); chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId())); agent.setChatAppConfig(chatAppConfig); + agent.setAdmins(Lists.newArrayList("tom")); + 
agent.setViewers(Lists.newArrayList("alice", "jack")); Agent agentCreated = agentService.createAgent(agent, defaultUser); return agentCreated.getId(); } diff --git a/launchers/standalone/src/main/resources/config.update/sql-update.sql b/launchers/standalone/src/main/resources/config.update/sql-update.sql index 0cefd0fb6..17c43e127 100644 --- a/launchers/standalone/src/main/resources/config.update/sql-update.sql +++ b/launchers/standalone/src/main/resources/config.update/sql-update.sql @@ -393,4 +393,8 @@ ALTER TABLE s2_agent DROP COLUMN `multi_turn_config`; ALTER TABLE s2_agent DROP COLUMN `enable_memory_review`; --20241012 -alter table s2_agent add column `enable_feedback` tinyint DEFAULT 1; \ No newline at end of file +alter table s2_agent add column `enable_feedback` tinyint DEFAULT 1; + +--20241116 +alter table s2_agent add column `admin` varchar(1000); +alter table s2_agent add column `viewer` varchar(1000); \ No newline at end of file diff --git a/launchers/standalone/src/main/resources/db/schema-h2.sql b/launchers/standalone/src/main/resources/db/schema-h2.sql index 71985491c..4da6f27c2 100644 --- a/launchers/standalone/src/main/resources/db/schema-h2.sql +++ b/launchers/standalone/src/main/resources/db/schema-h2.sql @@ -398,6 +398,8 @@ CREATE TABLE IF NOT EXISTS s2_agent updated_at TIMESTAMP null, enable_search int null, enable_feedback int null, + admin varchar(1000), + viewer varchar(1000), PRIMARY KEY (`id`) ); COMMENT ON TABLE s2_agent IS 'agent information table'; diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java index d813ef0aa..661262d50 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java @@ -51,7 +51,7 @@ public class SchemaAuthTest extends BaseTest { public void test_getVisibleModelList_alice() { User user = DataUtils.getUserAlice(); List modelResps = - modelService.getModelListWithAuth(user, null, AuthType.VISIBLE); + modelService.getModelListWithAuth(user, null, AuthType.VIEWER); List expectedModelBizNames = Lists.newArrayList("user_department", "singer"); Assertions.assertEquals(expectedModelBizNames, modelResps.stream().map(ModelResp::getBizName).collect(Collectors.toList())); diff --git a/launchers/standalone/src/test/resources/db/schema-h2.sql b/launchers/standalone/src/test/resources/db/schema-h2.sql index 7439debac..8584c6239 100644 --- a/launchers/standalone/src/test/resources/db/schema-h2.sql +++ b/launchers/standalone/src/test/resources/db/schema-h2.sql @@ -398,6 +398,8 @@ CREATE TABLE IF NOT EXISTS s2_agent updated_at TIMESTAMP null, enable_search int null, enable_feedback int null, + admin varchar(1000), + viewer varchar(1000), PRIMARY KEY (`id`) ); COMMENT ON TABLE s2_agent IS 'agent information table'; From a0f53359ef5f88b296b8d1caa03bbda1258ce618 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Sun, 17 Nov 2024 09:06:42 +0800 Subject: [PATCH 02/88] [improvement][chat]Iterate LLM prompts of parsing and correction. 
---
 .../headless/chat/corrector/LLMSqlCorrector.java | 4 ++--
 .../chat/parser/llm/OnePassSCSqlGenStrategy.java | 16 +++++++---------
 2 files changed, 9 insertions(+), 11 deletions(-)

diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/corrector/LLMSqlCorrector.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/corrector/LLMSqlCorrector.java
index 5f8335cc8..a8ebb910a 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/corrector/LLMSqlCorrector.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/corrector/LLMSqlCorrector.java
@@ -36,8 +36,8 @@ public class LLMSqlCorrector extends BaseSemanticCorrector {
             + "\n2.NO NEED to check date filters as the junior engineer seldom makes mistakes in this regard."
             + "\n3.DO NOT miss the AGGREGATE operator of metrics, always add it as needed."
             + "\n4.ALWAYS use `with` statement if nested aggregation is needed."
-            + "\n5.ALWAYS enclose alias created by `AS` command in underscores."
-            + "\n6.ALWAYS translate alias created by `AS` command to the same language as the `#Question`."
+            + "\n5.ALWAYS enclose alias declared by `AS` command in underscores."
+            + "\n6.Alias created by `AS` command must be in the same language as the `Question`."
             + "\n#Question:{{question}} #InputSQL:{{sql}} #Response:";

     public LLMSqlCorrector() {
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java
index fd5f176e3..a41be00ab 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java
@@ -36,15 +36,13 @@ public class OnePassSCSqlGenStrategy extends SqlGenStrategy {
             + "\n#Task: You will be provided with a natural language question asked by users,"
             + "please convert it to a SQL query so that relevant data could be returned "
             + "by executing the SQL query against underlying database."
             + "\n#Rules:"
-            + "\n1.ALWAYS generate columns and values specified in the `Schema`, DO NOT hallucinate."
-            + "\n2.ALWAYS be cautious, word in the `Schema` does not mean it must appear in the SQL."
-            + "\n3.ALWAYS specify date filter using `>`,`<`,`>=`,`<=` operator."
-            + "\n4.DO NOT include date filter in the where clause if not explicitly expressed in the `Question`."
-            + "\n5.DO NOT calculate date range using functions."
-            + "\n6.DO NOT miss the AGGREGATE operator of metrics, always add it as needed."
-            + "\n7.ALWAYS use `with` statement if nested aggregation is needed."
-            + "\n8.ALWAYS enclose alias created by `AS` command in underscores."
-            + "\n9.ALWAYS translate alias created by `AS` command to the same language as the `#Question`."
+            + "\n1.SQL columns and values must be mentioned in the `Schema`, DO NOT hallucinate."
+            + "\n2.ALWAYS specify date filter using `>`,`<`,`>=`,`<=` operator."
+            + "\n3.DO NOT include date filter in the where clause if not explicitly expressed in the `Question`."
+            + "\n4.DO NOT calculate date range using functions."
+            + "\n5.ALWAYS use `with` statement if nested aggregation is needed."
+            + "\n6.ALWAYS enclose alias declared by `AS` command in underscores."
+            + "\n7.Alias created by `AS` command must be in the same language as the `Question`."
+ "\n#Exemplars: {{exemplar}}" + "\n#Query: Question:{{question}},Schema:{{schema}},SideInfo:{{information}}"; From cd889b479c90cacb9946c93434e0f502866047b0 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Sun, 17 Nov 2024 22:54:45 +0800 Subject: [PATCH 03/88] [improvement][headless]Clean code logic of headless core. --- .../translator/DefaultSemanticTranslator.java | 14 +- .../calcite/CalciteQueryParser.java | 20 +-- .../calcite/planner/AggPlanner.java | 64 ++++----- .../translator/calcite/planner/Planner.java | 8 +- .../s2sql/{DataSource.java => DataModel.java} | 2 +- .../calcite/s2sql/SemanticModel.java | 6 +- ...anticSchema.java => S2SemanticSchema.java} | 15 ++- .../calcite/schema/SchemaBuilder.java | 2 +- .../translator/calcite/sql/Optimization.java | 4 +- .../core/translator/calcite/sql/Renderer.java | 20 +-- .../translator/calcite/sql/TableView.java | 4 +- .../calcite/sql/node/DataSourceNode.java | 122 ++++++++--------- .../calcite/sql/node/MetricNode.java | 4 +- .../calcite/sql/node/SemanticNode.java | 6 +- .../sql/optimizer/FilterToGroupScanRule.java | 7 +- .../calcite/sql/render/FilterRender.java | 12 +- .../calcite/sql/render/JoinRender.java | 125 +++++++++--------- .../calcite/sql/render/OutputRender.java | 8 +- .../calcite/sql/render/SourceRender.java | 51 ++++--- .../converter/ParserDefaultConverter.java | 4 +- .../converter/SqlVariableParseConverter.java | 6 +- .../server/manager/SemanticSchemaManager.java | 23 ++-- .../calcite/HeadlessParserServiceTest.java | 12 +- .../supersonic/headless/SchemaAuthTest.java | 3 +- 24 files changed, 264 insertions(+), 278 deletions(-) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/{DataSource.java => DataModel.java} (95%) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/{SemanticSchema.java => S2SemanticSchema.java} (90%) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java index 0f59f50b7..988817643 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java @@ -42,7 +42,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator { } } - public void parse(QueryStatement queryStatement) throws Exception { + private void parse(QueryStatement queryStatement) throws Exception { QueryParam queryParam = queryStatement.getQueryParam(); if (Objects.isNull(queryStatement.getDataSetQueryParam())) { queryStatement.setDataSetQueryParam(new DataSetQueryParam()); @@ -64,7 +64,8 @@ public class DefaultSemanticTranslator implements SemanticTranslator { } else { queryStatement.getMetricQueryParam() .setNativeQuery(queryParam.getQueryType().isNativeAggQuery()); - doParse(queryStatement); + doParse(queryStatement, + AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery())); } if (StringUtils.isEmpty(queryStatement.getSql())) { throw new RuntimeException("parse Exception: " + queryStatement.getErrMsg()); @@ -77,7 +78,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator { } } - public QueryStatement doParse(DataSetQueryParam dataSetQueryParam, + private QueryStatement doParse(DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) { log.info("parse 
dataSetQuery [{}] ", dataSetQueryParam); SemanticModel semanticModel = queryStatement.getSemanticModel(); @@ -132,12 +133,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator { return queryStatement; } - public QueryStatement doParse(QueryStatement queryStatement) { - return doParse(queryStatement, - AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery())); - } - - public QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) { + private QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) { MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam(); log.info("parse metricQuery [{}] isAgg [{}]", metricQueryParam, isAgg); try { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java index 4153f8f2e..128106b2a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java @@ -9,7 +9,7 @@ import com.tencent.supersonic.headless.core.translator.QueryParser; import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; import com.tencent.supersonic.headless.core.translator.calcite.schema.RuntimeOptions; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.sql.parser.SqlParseException; import org.springframework.stereotype.Component; @@ -31,18 +31,18 @@ public class CalciteQueryParser implements QueryParser { return; } queryStatement.setMetricQueryParam(metricReq); - SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement); - AggPlanner aggBuilder = new AggPlanner(semanticSchema); - aggBuilder.explain(queryStatement, isAgg); + S2SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement); + AggPlanner aggPlanner = new AggPlanner(semanticSchema); + aggPlanner.plan(queryStatement, isAgg); EngineType engineType = EngineType.fromString(semanticModel.getDatabase().getType()); - queryStatement.setSql(aggBuilder.getSql(engineType)); + queryStatement.setSql(aggPlanner.getSql(engineType)); if (Objects.nonNull(queryStatement.getEnableOptimize()) && queryStatement.getEnableOptimize() && Objects.nonNull(queryStatement.getDataSetAlias()) && !queryStatement.getDataSetAlias().isEmpty()) { // simplify model sql with query sql - String simplifySql = aggBuilder.simplify( - getSqlByDataSet(engineType, aggBuilder.getSql(engineType), + String simplifySql = aggPlanner.simplify( + getSqlByDataSet(engineType, aggPlanner.getSql(engineType), queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()), engineType); if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) { @@ -52,10 +52,10 @@ public class CalciteQueryParser implements QueryParser { } } - private SemanticSchema getSemanticSchema(SemanticModel semanticModel, + private S2SemanticSchema getSemanticSchema(SemanticModel semanticModel, QueryStatement queryStatement) { - SemanticSchema semanticSchema = - SemanticSchema.newBuilder(semanticModel.getSchemaKey()).build(); + S2SemanticSchema 
semanticSchema = + S2SemanticSchema.newBuilder(semanticModel.getSchemaKey()).build(); semanticSchema.setSemanticModel(semanticModel); semanticSchema.setDatasource(semanticModel.getDatasourceMap()); semanticSchema.setDimension(semanticModel.getDimensionMap()); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java index fd12cf821..4b1036f86 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java @@ -7,11 +7,10 @@ import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; -import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender; @@ -27,29 +26,27 @@ import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Objects; -import java.util.Stack; /** parsing from query dimensions and metrics */ @Slf4j public class AggPlanner implements Planner { private MetricQueryParam metricReq; - private SemanticSchema schema; + private final S2SemanticSchema schema; private SqlValidatorScope scope; - private Stack dataSets = new Stack<>(); private SqlNode parserNode; private String sourceId; private boolean isAgg = false; private AggOption aggOption = AggOption.DEFAULT; - public AggPlanner(SemanticSchema schema) { + public AggPlanner(S2SemanticSchema schema) { this.schema = schema; } - public void parse() throws Exception { + private void parse() throws Exception { // find the match Datasource scope = SchemaBuilder.getScope(schema); - List datasource = getMatchDataSource(scope); + List datasource = getMatchDataSource(scope); if (datasource == null || datasource.isEmpty()) { throw new Exception("datasource not found"); } @@ -78,16 +75,16 @@ public class AggPlanner implements Planner { parserNode = builders.getLast().builder(); } - private List getMatchDataSource(SqlValidatorScope scope) throws Exception { + private List getMatchDataSource(SqlValidatorScope scope) throws Exception { return DataSourceNode.getMatchDataSources(scope, schema, metricReq); } - private boolean getAgg(DataSource dataSource) { + private boolean getAgg(DataModel dataModel) { if (!AggOption.DEFAULT.equals(aggOption)) { return AggOption.isAgg(aggOption); } - // default by dataSource time aggregation - if (Objects.nonNull(dataSource.getAggTime()) && !dataSource.getAggTime() + 
// default by dataModel time aggregation + if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime() .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) { if (!metricReq.isNativeQuery()) { return true; @@ -97,7 +94,7 @@ public class AggPlanner implements Planner { } @Override - public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception { + public void plan(QueryStatement queryStatement, AggOption aggOption) throws Exception { this.metricReq = queryStatement.getMetricQueryParam(); if (metricReq.getMetrics() == null) { metricReq.setMetrics(new ArrayList<>()); @@ -129,22 +126,6 @@ public class AggPlanner implements Planner { @Override public String simplify(String sql, EngineType engineType) { - return optimize(sql, engineType); - } - - public void optimize(EngineType engineType) { - if (Objects.isNull(schema.getRuntimeOptions()) - || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize()) - || !schema.getRuntimeOptions().getEnableOptimize()) { - return; - } - SqlNode optimizeNode = optimizeSql(SemanticNode.getSql(parserNode, engineType), engineType); - if (Objects.nonNull(optimizeNode)) { - parserNode = optimizeNode; - } - } - - public String optimize(String sql, EngineType engineType) { try { SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt(); @@ -153,21 +134,32 @@ public class AggPlanner implements Planner { SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType); } } catch (Exception e) { - log.error("optimize error {}", e); + log.error("optimize error {}", e.toString()); } return ""; } - private SqlNode optimizeSql(String sql, EngineType engineType) { + private void optimize(EngineType engineType) { + if (Objects.isNull(schema.getRuntimeOptions()) + || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize()) + || !schema.getRuntimeOptions().getEnableOptimize()) { + return; + } + + SqlNode optimizeNode = null; try { - SqlNode sqlNode = - SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt(); + SqlNode sqlNode = SqlParser.create(SemanticNode.getSql(parserNode, engineType), + Configuration.getParserConfig(engineType)).parseStmt(); if (Objects.nonNull(sqlNode)) { - return SemanticNode.optimize(scope, schema, sqlNode, engineType); + optimizeNode = SemanticNode.optimize(scope, schema, sqlNode, engineType); } } catch (Exception e) { log.error("optimize error {}", e); } - return null; + + if (Objects.nonNull(optimizeNode)) { + parserNode = optimizeNode; + } } + } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java index b39e6e5af..cccd9a71a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java @@ -7,11 +7,11 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement; /** parse and generate SQL and other execute information */ public interface Planner { - public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception; + void plan(QueryStatement queryStatement, AggOption aggOption) throws Exception; - public String getSql(EngineType enginType); + String getSql(EngineType enginType); - public String getSourceId(); + String getSourceId(); - public String simplify(String sql, 
EngineType engineType);
+    String simplify(String sql, EngineType engineType);
 }
diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataSource.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataModel.java
similarity index 95%
rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataSource.java
rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataModel.java
index 300744284..4486f9572 100644
--- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataSource.java
+++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataModel.java
@@ -7,7 +7,7 @@
 import java.util.List;

 @Data
 @Builder
-public class DataSource {
+public class DataModel {

     private Long id;
diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticModel.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticModel.java
index 40b4e8171..af742891e 100644
--- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticModel.java
+++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticModel.java
@@ -15,7 +15,7 @@ public class SemanticModel {

     private String schemaKey;
     private List<Metric> metrics = new ArrayList<>();
-    private Map<String, DataSource> datasourceMap = new HashMap<>();
+    private Map<String, DataModel> datasourceMap = new HashMap<>();
     private Map<String, List<Dimension>> dimensionMap = new HashMap<>();
     private List<Materialization> materializationList = new ArrayList<>();
     private List<JoinRelation> joinRelations;
@@ -26,8 +26,8 @@ public class SemanticModel {
                 .collect(Collectors.toList());
     }

-    public Map<Long, DataSource> getModelMap() {
+    public Map<Long, DataModel> getModelMap() {
         return datasourceMap.values().stream()
-                .collect(Collectors.toMap(DataSource::getId, dataSource -> dataSource));
+                .collect(Collectors.toMap(DataModel::getId, dataSource -> dataSource));
     }
 }
diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SemanticSchema.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java
similarity index 90%
rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SemanticSchema.java
rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java
index c8afc0cd7..b53879817 100644
--- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SemanticSchema.java
+++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java
@@ -1,6 +1,6 @@
 package com.tencent.supersonic.headless.core.translator.calcite.schema;

-import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
+import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
 import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
 import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
 import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
@@ -15,9 +15,10 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-public class SemanticSchema extends AbstractSchema {
+public class S2SemanticSchema 
extends AbstractSchema { private final String schemaKey; + private final Map tableMap; private SemanticModel semanticModel = new SemanticModel(); @@ -26,7 +27,7 @@ public class SemanticSchema extends AbstractSchema { private RuntimeOptions runtimeOptions; - private SemanticSchema(String schemaKey, Map tableMap) { + private S2SemanticSchema(String schemaKey, Map tableMap) { this.schemaKey = schemaKey; this.tableMap = tableMap; } @@ -57,11 +58,11 @@ public class SemanticSchema extends AbstractSchema { return this; } - public Map getDatasource() { + public Map getDatasource() { return semanticModel.getDatasourceMap(); } - public void setDatasource(Map datasource) { + public void setDatasource(Map datasource) { semanticModel.setDatasourceMap(datasource); } @@ -129,8 +130,8 @@ public class SemanticSchema extends AbstractSchema { return this; } - public SemanticSchema build() { - return new SemanticSchema(schemaKey, tableMap); + public S2SemanticSchema build() { + return new S2SemanticSchema(schemaKey, tableMap); } } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SchemaBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SchemaBuilder.java index 8cad09dac..ec12bc402 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SchemaBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SchemaBuilder.java @@ -27,7 +27,7 @@ public class SchemaBuilder { public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1"; public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2"; - public static SqlValidatorScope getScope(SemanticSchema schema) throws Exception { + public static SqlValidatorScope getScope(S2SemanticSchema schema) throws Exception { Map nameToTypeMap = new HashMap<>(); CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false); rootSchema.add(schema.getSchemaKey(), schema); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java index 6df9889c2..3e73b3897 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java @@ -1,8 +1,8 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; public interface Optimization { - public void visit(SemanticSchema semanticSchema); + public void visit(S2SemanticSchema semanticSchema); } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Renderer.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Renderer.java index 088a98e99..f5eedce08 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Renderer.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Renderer.java @@ -2,12 +2,12 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql; import com.tencent.supersonic.common.pojo.enums.EngineType; import 
com.tencent.supersonic.headless.core.pojo.MetricQueryParam; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; @@ -27,29 +27,29 @@ public abstract class Renderer { protected TableView tableView = new TableView(); - public static Optional getDimensionByName(String name, DataSource datasource) { + public static Optional getDimensionByName(String name, DataModel datasource) { return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name)) .findFirst(); } - public static Optional getMeasureByName(String name, DataSource datasource) { + public static Optional getMeasureByName(String name, DataModel datasource) { return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name)) .findFirst(); } - public static Optional getMetricByName(String name, SemanticSchema schema) { + public static Optional getMetricByName(String name, S2SemanticSchema schema) { Optional metric = schema.getMetrics().stream() .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst(); return metric; } - public static Optional getIdentifyByName(String name, DataSource datasource) { + public static Optional getIdentifyByName(String name, DataModel datasource) { return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name)) .findFirst(); } - public static MetricNode buildMetricNode(String metric, DataSource datasource, - SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg, String alias) + public static MetricNode buildMetricNode(String metric, DataModel datasource, + SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg, String alias) throws Exception { Optional metricOpt = getMetricByName(metric, schema); MetricNode metricNode = new MetricNode(); @@ -113,6 +113,6 @@ public abstract class Renderer { return SemanticNode.buildAs(alias, tableView.build()); } - public abstract void render(MetricQueryParam metricCommand, List dataSources, - SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception; + public abstract void render(MetricQueryParam metricCommand, List dataModels, + SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/TableView.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/TableView.java index 8c21132f9..74828fbc2 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/TableView.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/TableView.java @@ -1,6 +1,6 @@ package 
com.tencent.supersonic.headless.core.translator.calcite.sql; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import lombok.Data; import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlKind; @@ -27,7 +27,7 @@ public class TableView { private String alias; private List primary; - private DataSource dataSource; + private DataModel dataModel; public SqlNode build() { measure.addAll(dimension); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataSourceNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataSourceNode.java index ae3fab3a5..c71acbb6a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataSourceNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataSourceNode.java @@ -6,13 +6,13 @@ import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.extend.LateralViewExplodeNode; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.sql.SqlBasicCall; @@ -40,7 +40,7 @@ import java.util.stream.Collectors; @Slf4j public class DataSourceNode extends SemanticNode { - public static SqlNode build(DataSource datasource, SqlValidatorScope scope) throws Exception { + public static SqlNode build(DataModel datasource, SqlValidatorScope scope) throws Exception { String sqlTable = ""; if (datasource.getSqlQuery() != null && !datasource.getSqlQuery().isEmpty()) { sqlTable = datasource.getSqlQuery(); @@ -61,7 +61,7 @@ public class DataSourceNode extends SemanticNode { return buildAs(datasource.getName(), source); } - private static void addSchema(SqlValidatorScope scope, DataSource datasource, String table) + private static void addSchema(SqlValidatorScope scope, DataModel datasource, String table) throws Exception { Map> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table); for (Map.Entry> entry : sqlTable.entrySet()) { @@ -75,7 +75,7 @@ public class DataSourceNode extends SemanticNode { } } - private static void addSchemaTable(SqlValidatorScope scope, DataSource datasource, String db, + private static void addSchemaTable(SqlValidatorScope scope, DataModel datasource, String db, String tb, Set fields) throws Exception { Set dateInfo = new HashSet<>(); Set dimensions = new HashSet<>(); @@ -112,7 +112,7 @@ public class DataSourceNode 
extends SemanticNode { dateInfo, dimensions, metrics); } - public static SqlNode buildExtend(DataSource datasource, Map exprList, + public static SqlNode buildExtend(DataModel datasource, Map exprList, SqlValidatorScope scope) throws Exception { if (CollectionUtils.isEmpty(exprList)) { return build(datasource, scope); @@ -146,11 +146,11 @@ public class DataSourceNode extends SemanticNode { return sqlNode; } - public static String getNames(List dataSourceList) { - return dataSourceList.stream().map(d -> d.getName()).collect(Collectors.joining("_")); + public static String getNames(List dataModelList) { + return dataModelList.stream().map(d -> d.getName()).collect(Collectors.joining("_")); } - public static void getQueryDimensionMeasure(SemanticSchema schema, + public static void getQueryDimensionMeasure(S2SemanticSchema schema, MetricQueryParam metricCommand, Set queryDimension, List measures) { queryDimension.addAll(metricCommand.getDimensions().stream() .map(d -> d.contains(Constants.DIMENSION_IDENTIFY) @@ -166,7 +166,7 @@ public class DataSourceNode extends SemanticNode { .forEach(m -> measures.add(m)); } - public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, + public static void mergeQueryFilterDimensionMeasure(S2SemanticSchema schema, MetricQueryParam metricCommand, Set queryDimension, List measures, SqlValidatorScope scope) throws Exception { EngineType engineType = @@ -193,18 +193,18 @@ public class DataSourceNode extends SemanticNode { } } - public static List getMatchDataSources(SqlValidatorScope scope, - SemanticSchema schema, MetricQueryParam metricCommand) throws Exception { - List dataSources = new ArrayList<>(); + public static List getMatchDataSources(SqlValidatorScope scope, + S2SemanticSchema schema, MetricQueryParam metricCommand) throws Exception { + List dataModels = new ArrayList<>(); // check by metric List measures = new ArrayList<>(); Set queryDimension = new HashSet<>(); getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures); - DataSource baseDataSource = null; + DataModel baseDataModel = null; // one , match measure count Map dataSourceMeasures = new HashMap<>(); - for (Map.Entry entry : schema.getDatasource().entrySet()) { + for (Map.Entry entry : schema.getDatasource().entrySet()) { Set sourceMeasure = entry.getValue().getMeasures().stream() .map(mm -> mm.getName()).collect(Collectors.toSet()); sourceMeasure.retainAll(measures); @@ -214,19 +214,19 @@ public class DataSourceNode extends SemanticNode { Optional> base = dataSourceMeasures.entrySet().stream() .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); if (base.isPresent()) { - baseDataSource = schema.getDatasource().get(base.get().getKey()); - dataSources.add(baseDataSource); + baseDataModel = schema.getDatasource().get(base.get().getKey()); + dataModels.add(baseDataModel); } // second , check match all dimension and metric - if (baseDataSource != null) { + if (baseDataModel != null) { Set filterMeasure = new HashSet<>(); - Set sourceMeasure = baseDataSource.getMeasures().stream() - .map(mm -> mm.getName()).collect(Collectors.toSet()); - Set dimension = baseDataSource.getDimensions().stream().map(dd -> dd.getName()) + Set sourceMeasure = baseDataModel.getMeasures().stream().map(mm -> mm.getName()) .collect(Collectors.toSet()); - baseDataSource.getIdentifiers().stream().forEach(i -> dimension.add(i.getName())); - if (schema.getDimension().containsKey(baseDataSource.getName())) { - schema.getDimension().get(baseDataSource.getName()).stream() + 
Set dimension = baseDataModel.getDimensions().stream().map(dd -> dd.getName()) + .collect(Collectors.toSet()); + baseDataModel.getIdentifiers().stream().forEach(i -> dimension.add(i.getName())); + if (schema.getDimension().containsKey(baseDataModel.getName())) { + schema.getDimension().get(baseDataModel.getName()).stream() .forEach(d -> dimension.add(d.getName())); } filterMeasure.addAll(sourceMeasure); @@ -238,34 +238,34 @@ public class DataSourceNode extends SemanticNode { boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope, engineType); if (isAllMatch) { - log.debug("baseDataSource match all "); - return dataSources; + log.debug("baseDataModel match all "); + return dataModels; } // find all dataSource has the same identifiers - List linkDataSources = getLinkDataSourcesByJoinRelation(queryDimension, - measures, baseDataSource, schema); - if (CollectionUtils.isEmpty(linkDataSources)) { - log.debug("baseDataSource get by identifiers "); - Set baseIdentifiers = baseDataSource.getIdentifiers().stream() + List linkDataModels = getLinkDataSourcesByJoinRelation(queryDimension, + measures, baseDataModel, schema); + if (CollectionUtils.isEmpty(linkDataModels)) { + log.debug("baseDataModel get by identifiers "); + Set baseIdentifiers = baseDataModel.getIdentifiers().stream() .map(i -> i.getName()).collect(Collectors.toSet()); if (baseIdentifiers.isEmpty()) { throw new Exception( - "datasource error : " + baseDataSource.getName() + " miss identifier"); + "datasource error : " + baseDataModel.getName() + " miss identifier"); } - linkDataSources = getLinkDataSources(baseIdentifiers, queryDimension, measures, - baseDataSource, schema); - if (linkDataSources.isEmpty()) { + linkDataModels = getLinkDataSources(baseIdentifiers, queryDimension, measures, + baseDataModel, schema); + if (linkDataModels.isEmpty()) { throw new Exception(String.format( "not find the match datasource : dimension[%s],measure[%s]", queryDimension, measures)); } } - log.debug("linkDataSources {}", linkDataSources); - return linkDataSources; - // dataSources.addAll(linkDataSources); + log.debug("linkDataModels {}", linkDataModels); + return linkDataModels; + // dataModels.addAll(linkDataModels); } - return dataSources; + return dataModels; } private static boolean checkMatch(Set sourceMeasure, Set queryDimension, @@ -301,17 +301,17 @@ public class DataSourceNode extends SemanticNode { return isAllMatch; } - private static List getLinkDataSourcesByJoinRelation(Set queryDimension, - List measures, DataSource baseDataSource, SemanticSchema schema) { + private static List getLinkDataSourcesByJoinRelation(Set queryDimension, + List measures, DataModel baseDataModel, S2SemanticSchema schema) { Set linkDataSourceName = new HashSet<>(); - List linkDataSources = new ArrayList<>(); + List linkDataModels = new ArrayList<>(); Set before = new HashSet<>(); - before.add(baseDataSource.getName()); + before.add(baseDataModel.getName()); if (!CollectionUtils.isEmpty(schema.getJoinRelations())) { Set visitJoinRelations = new HashSet<>(); List sortedJoinRelation = new ArrayList<>(); - sortJoinRelation(schema.getJoinRelations(), baseDataSource.getName(), - visitJoinRelations, sortedJoinRelation); + sortJoinRelation(schema.getJoinRelations(), baseDataModel.getName(), visitJoinRelations, + sortedJoinRelation); schema.getJoinRelations().stream().filter(j -> !visitJoinRelations.contains(j.getId())) .forEach(j -> sortedJoinRelation.add(j)); for (JoinRelation joinRelation : sortedJoinRelation) { @@ -321,7 
+321,7 @@ public class DataSourceNode extends SemanticNode { } boolean isMatch = false; boolean isRight = before.contains(joinRelation.getLeft()); - DataSource other = isRight ? schema.getDatasource().get(joinRelation.getRight()) + DataModel other = isRight ? schema.getDatasource().get(joinRelation.getRight()) : schema.getDatasource().get(joinRelation.getLeft()); if (!queryDimension.isEmpty()) { Set linkDimension = other.getDimensions().stream() @@ -354,8 +354,8 @@ public class DataSourceNode extends SemanticNode { } if (!CollectionUtils.isEmpty(linkDataSourceName)) { Map orders = new HashMap<>(); - linkDataSourceName.add(baseDataSource.getName()); - orders.put(baseDataSource.getName(), 0L); + linkDataSourceName.add(baseDataModel.getName()); + orders.put(baseDataModel.getName(), 0L); for (JoinRelation joinRelation : schema.getJoinRelations()) { if (linkDataSourceName.contains(joinRelation.getLeft()) && linkDataSourceName.contains(joinRelation.getRight())) { @@ -364,10 +364,10 @@ public class DataSourceNode extends SemanticNode { } } orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> { - linkDataSources.add(schema.getDatasource().get(d.getKey())); + linkDataModels.add(schema.getDatasource().get(d.getKey())); }); } - return linkDataSources; + return linkDataModels; } private static void sortJoinRelation(List joinRelations, String next, @@ -385,13 +385,13 @@ public class DataSourceNode extends SemanticNode { } } - private static List getLinkDataSources(Set baseIdentifiers, - Set queryDimension, List measures, DataSource baseDataSource, - SemanticSchema schema) { + private static List getLinkDataSources(Set baseIdentifiers, + Set queryDimension, List measures, DataModel baseDataModel, + S2SemanticSchema schema) { Set linkDataSourceName = new HashSet<>(); - List linkDataSources = new ArrayList<>(); - for (Map.Entry entry : schema.getDatasource().entrySet()) { - if (entry.getKey().equalsIgnoreCase(baseDataSource.getName())) { + List linkDataModels = new ArrayList<>(); + for (Map.Entry entry : schema.getDatasource().entrySet()) { + if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) { continue; } Long identifierNum = entry.getValue().getIdentifiers().stream().map(i -> i.getName()) @@ -432,12 +432,12 @@ public class DataSourceNode extends SemanticNode { } } for (String linkName : linkDataSourceName) { - linkDataSources.add(schema.getDatasource().get(linkName)); + linkDataModels.add(schema.getDatasource().get(linkName)); } - if (!CollectionUtils.isEmpty(linkDataSources)) { - List all = new ArrayList<>(); - all.add(baseDataSource); - all.addAll(linkDataSources); + if (!CollectionUtils.isEmpty(linkDataModels)) { + List all = new ArrayList<>(); + all.add(baseDataModel); + all.addAll(linkDataModels); return all; } return Lists.newArrayList(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java index 6a894452b..d50566a3e 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java @@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.node; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import 
com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import lombok.Data; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.validate.SqlValidatorScope; @@ -30,7 +30,7 @@ public class MetricNode extends SemanticNode { return buildAs(metric.getName(), sqlNode); } - public static Boolean isMetricField(String name, SemanticSchema schema) { + public static Boolean isMetricField(String name, S2SemanticSchema schema) { Optional metric = schema.getMetrics().stream() .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst(); return metric.isPresent() && metric.get().getMetricTypeParams().isFieldMetric(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java index 78cc8720e..1ad5f569e 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java @@ -5,7 +5,7 @@ import com.tencent.supersonic.common.calcite.SemanticSqlDialect; import com.tencent.supersonic.common.calcite.SqlDialectFactory; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer.FilterToGroupScanRule; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.plan.RelOptPlanner; @@ -397,8 +397,8 @@ public abstract class SemanticNode { return parseInfo; } - public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode, - EngineType engineType) { + public static SqlNode optimize(SqlValidatorScope scope, S2SemanticSchema schema, + SqlNode sqlNode, EngineType engineType) { try { HepProgramBuilder hepProgramBuilder = new HepProgramBuilder(); SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/optimizer/FilterToGroupScanRule.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/optimizer/FilterToGroupScanRule.java index b812f8f8c..bc5194970 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/optimizer/FilterToGroupScanRule.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/optimizer/FilterToGroupScanRule.java @@ -1,6 +1,6 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelRule; import org.apache.calcite.rel.core.Aggregate; @@ -40,9 +40,10 @@ public class FilterToGroupScanRule extends RelRule implements Transforma }); }).as(FilterTableScanRule.Config.class); - private SemanticSchema semanticSchema; + private S2SemanticSchema semanticSchema; - public 
FilterToGroupScanRule(FilterTableScanRule.Config config, SemanticSchema semanticSchema) { + public FilterToGroupScanRule(FilterTableScanRule.Config config, + S2SemanticSchema semanticSchema) { super(config); this.semanticSchema = semanticSchema; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java index bf3abfd03..ea00612de 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java @@ -3,9 +3,9 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; @@ -27,8 +27,8 @@ import java.util.stream.Collectors; public class FilterRender extends Renderer { @Override - public void render(MetricQueryParam metricCommand, List dataSources, - SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { + public void render(MetricQueryParam metricCommand, List dataModels, + SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { TableView tableView = super.tableView; SqlNode filterNode = null; List queryMetrics = new ArrayList<>(metricCommand.getMetrics()); @@ -43,9 +43,9 @@ public class FilterRender extends Renderer { List fieldWhere = whereFields.stream().collect(Collectors.toList()); Set dimensions = new HashSet<>(); Set metrics = new HashSet<>(); - for (DataSource dataSource : dataSources) { + for (DataModel dataModel : dataModels) { SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(), - metricCommand.getDimensions(), dataSource, schema, dimensions, metrics); + metricCommand.getDimensions(), dataModel, schema, dimensions, metrics); } queryMetrics.addAll(metrics); queryDimensions.addAll(dimensions); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java index 76cc6bf68..eff262f5a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java @@ -3,13 +3,13 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; import 
com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode; @@ -48,8 +48,8 @@ import java.util.stream.Collectors; public class JoinRender extends Renderer { @Override - public void render(MetricQueryParam metricCommand, List dataSources, - SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { + public void render(MetricQueryParam metricCommand, List dataModels, + SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { String queryWhere = metricCommand.getWhere(); EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); @@ -71,14 +71,14 @@ public class JoinRender extends Renderer { Set filterDimension = new HashSet<>(); Map beforeSources = new HashMap<>(); - for (int i = 0; i < dataSources.size(); i++) { - final DataSource dataSource = dataSources.get(i); + for (int i = 0; i < dataModels.size(); i++) { + final DataModel dataModel = dataModels.get(i); final Set filterDimensions = new HashSet<>(); final Set filterMetrics = new HashSet<>(); final List queryDimension = new ArrayList<>(); final List queryMetrics = new ArrayList<>(); - SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataSource, - schema, filterDimensions, filterMetrics); + SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema, + filterDimensions, filterMetrics); List reqMetric = new ArrayList<>(metricCommand.getMetrics()); reqMetric.addAll(filterMetrics); reqMetric = uniqList(reqMetric); @@ -87,40 +87,40 @@ public class JoinRender extends Renderer { reqDimension.addAll(filterDimensions); reqDimension = uniqList(reqDimension); - Set sourceMeasure = dataSource.getMeasures().stream().map(mm -> mm.getName()) + Set sourceMeasure = dataModel.getMeasures().stream().map(mm -> mm.getName()) .collect(Collectors.toSet()); - doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataSource, sourceMeasure, + doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataModel, sourceMeasure, scope, schema, nonAgg); - Set dimension = dataSource.getDimensions().stream().map(dd -> dd.getName()) + Set dimension = dataModel.getDimensions().stream().map(dd -> dd.getName()) .collect(Collectors.toSet()); - doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataSource, + doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataModel, dimension, scope, schema); List primary = new ArrayList<>(); - for (Identify 
identify : dataSource.getIdentifiers()) { + for (Identify identify : dataModel.getIdentifiers()) { primary.add(identify.getName()); if (!fieldWhere.contains(identify.getName())) { fieldWhere.add(identify.getName()); } } List dataSourceWhere = new ArrayList<>(fieldWhere); - addZipperField(dataSource, dataSourceWhere); + addZipperField(dataModel, dataSourceWhere); TableView tableView = SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension, - metricCommand.getWhere(), dataSources.get(i), scope, schema, true); + metricCommand.getWhere(), dataModels.get(i), scope, schema, true); log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString())); - String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); + String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); tableView.setAlias(alias); tableView.setPrimary(primary); - tableView.setDataSource(dataSource); + tableView.setDataModel(dataModel); if (left == null) { leftTable = tableView; left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)); - beforeSources.put(dataSource.getName(), leftTable.getAlias()); + beforeSources.put(dataModel.getName(), leftTable.getAlias()); continue; } - left = buildJoin(left, leftTable, tableView, beforeSources, dataSource, schema, scope); + left = buildJoin(left, leftTable, tableView, beforeSources, dataModel, schema, scope); leftTable = tableView; - beforeSources.put(dataSource.getName(), tableView.getAlias()); + beforeSources.put(dataModel.getName(), tableView.getAlias()); } for (Map.Entry entry : innerSelect.entrySet()) { @@ -144,16 +144,15 @@ public class JoinRender extends Renderer { } private void doMetric(Map innerSelect, TableView filterView, - List queryMetrics, List reqMetrics, DataSource dataSource, - Set sourceMeasure, SqlValidatorScope scope, SemanticSchema schema, + List queryMetrics, List reqMetrics, DataModel dataModel, + Set sourceMeasure, SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { - String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); + String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); for (String m : reqMetrics) { if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) { - MetricNode metricNode = - buildMetricNode(m, dataSource, scope, schema, nonAgg, alias); + MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias); if (!metricNode.getNonAggNode().isEmpty()) { for (String measure : metricNode.getNonAggNode().keySet()) { @@ -181,14 +180,14 @@ public class JoinRender extends Renderer { } private void doDimension(Map innerSelect, Set filterDimension, - List queryDimension, List reqDimensions, DataSource dataSource, - Set dimension, SqlValidatorScope scope, SemanticSchema schema) + List queryDimension, List reqDimensions, DataModel dataModel, + Set dimension, SqlValidatorScope scope, S2SemanticSchema schema) throws Exception { - String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); + String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); for (String d : reqDimensions) { - if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) { + if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) { String[] identifyDimension = 
d.split(Constants.DIMENSION_IDENTIFY); innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode @@ -209,7 +208,7 @@ public class JoinRender extends Renderer { .collect(Collectors.toSet()); } - private boolean getMatchMetric(SemanticSchema schema, Set sourceMeasure, String m, + private boolean getMatchMetric(S2SemanticSchema schema, Set sourceMeasure, String m, List queryMetrics) { Optional metric = schema.getMetrics().stream() .filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst(); @@ -230,8 +229,8 @@ public class JoinRender extends Renderer { return isAdd; } - private boolean getMatchDimension(SemanticSchema schema, Set sourceDimension, - DataSource dataSource, String d, List queryDimension) { + private boolean getMatchDimension(S2SemanticSchema schema, Set sourceDimension, + DataModel dataModel, String d, List queryDimension) { String oriDimension = d; boolean isAdd = false; if (d.contains(Constants.DIMENSION_IDENTIFY)) { @@ -240,14 +239,14 @@ public class JoinRender extends Renderer { if (sourceDimension.contains(oriDimension)) { isAdd = true; } - for (Identify identify : dataSource.getIdentifiers()) { + for (Identify identify : dataModel.getIdentifiers()) { if (identify.getName().equalsIgnoreCase(oriDimension)) { isAdd = true; break; } } - if (schema.getDimension().containsKey(dataSource.getName())) { - for (Dimension dim : schema.getDimension().get(dataSource.getName())) { + if (schema.getDimension().containsKey(dataModel.getName())) { + for (Dimension dim : schema.getDimension().get(dataModel.getName())) { if (dim.getName().equalsIgnoreCase(oriDimension)) { isAdd = true; } @@ -264,12 +263,12 @@ public class JoinRender extends Renderer { } private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, - Map before, DataSource dataSource, SemanticSchema schema, + Map before, DataModel dataModel, S2SemanticSchema schema, SqlValidatorScope scope) throws Exception { EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); SqlNode condition = - getCondition(leftTable, tableView, dataSource, schema, scope, engineType); + getCondition(leftTable, tableView, dataModel, schema, scope, engineType); SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral(""); JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema); SqlNode joinRelationCondition = null; @@ -278,11 +277,11 @@ public class JoinRender extends Renderer { joinRelationCondition = getCondition(matchJoinRelation, scope, engineType); condition = joinRelationCondition; } - if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataSource().getTimePartType()) + if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataModel().getTimePartType()) || Materialization.TimePartType.ZIPPER - .equals(tableView.getDataSource().getTimePartType())) { + .equals(tableView.getDataModel().getTimePartType())) { SqlNode zipperCondition = - getZipperCondition(leftTable, tableView, dataSource, schema, scope); + getZipperCondition(leftTable, tableView, dataModel, schema, scope); if (Objects.nonNull(joinRelationCondition)) { condition = new SqlBasicCall(SqlStdOperatorTable.AND, new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)), @@ -299,11 +298,11 @@ public class JoinRender extends Renderer { } private JoinRelation getMatchJoinRelation(Map before, TableView tableView, - SemanticSchema schema) { + S2SemanticSchema schema) { JoinRelation matchJoinRelation = JoinRelation.builder().build(); if (!CollectionUtils.isEmpty(schema.getJoinRelations())) { 
for (JoinRelation joinRelation : schema.getJoinRelations()) { - if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataSource().getName()) + if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName()) && before.containsKey(joinRelation.getLeft())) { matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream() .map(r -> Triple.of( @@ -338,8 +337,8 @@ public class JoinRender extends Renderer { return condition; } - private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, - SemanticSchema schema, SqlValidatorScope scope, EngineType engineType) + private SqlNode getCondition(TableView left, TableView right, DataModel dataModel, + S2SemanticSchema schema, SqlValidatorScope scope, EngineType engineType) throws Exception { Set selectLeft = SemanticNode.getSelect(left.getTable()); @@ -347,16 +346,16 @@ public class JoinRender extends Renderer { selectLeft.retainAll(selectRight); SqlNode condition = null; for (String on : selectLeft) { - if (!SourceRender.isDimension(on, dataSource, schema)) { + if (!SourceRender.isDimension(on, dataModel, schema)) { continue; } - if (IdentifyNode.isForeign(on, left.getDataSource().getIdentifiers())) { - if (!IdentifyNode.isPrimary(on, right.getDataSource().getIdentifiers())) { + if (IdentifyNode.isForeign(on, left.getDataModel().getIdentifiers())) { + if (!IdentifyNode.isPrimary(on, right.getDataModel().getIdentifiers())) { continue; } } - if (IdentifyNode.isForeign(on, right.getDataSource().getIdentifiers())) { - if (!IdentifyNode.isPrimary(on, left.getDataSource().getIdentifiers())) { + if (IdentifyNode.isForeign(on, right.getDataModel().getIdentifiers())) { + if (!IdentifyNode.isPrimary(on, left.getDataModel().getIdentifiers())) { continue; } } @@ -396,9 +395,9 @@ public class JoinRender extends Renderer { visited.put(id, false); } - private void addZipperField(DataSource dataSource, List fields) { - if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) { - dataSource.getDimensions().stream() + private void addZipperField(DataModel dataModel, List fields) { + if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) { + dataModel.getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .forEach(t -> { if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END) @@ -413,18 +412,18 @@ public class JoinRender extends Renderer { } } - private SqlNode getZipperCondition(TableView left, TableView right, DataSource dataSource, - SemanticSchema schema, SqlValidatorScope scope) throws Exception { - if (Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType()) + private SqlNode getZipperCondition(TableView left, TableView right, DataModel dataModel, + S2SemanticSchema schema, SqlValidatorScope scope) throws Exception { + if (Materialization.TimePartType.ZIPPER.equals(left.getDataModel().getTimePartType()) && Materialization.TimePartType.ZIPPER - .equals(right.getDataSource().getTimePartType())) { + .equals(right.getDataModel().getTimePartType())) { throw new Exception("not support two zipper table"); } SqlNode condition = null; - Optional leftTime = left.getDataSource().getDimensions().stream() + Optional leftTime = left.getDataModel().getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .findFirst(); - Optional rightTime = right.getDataSource().getDimensions().stream() + Optional rightTime = right.getDataModel().getDimensions().stream() 
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .findFirst(); if (leftTime.isPresent() && rightTime.isPresent()) { @@ -434,7 +433,7 @@ public class JoinRender extends Renderer { String dateTime = ""; Optional startTimeOp = (Materialization.TimePartType.ZIPPER - .equals(left.getDataSource().getTimePartType()) ? left : right).getDataSource() + .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel() .getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME .equalsIgnoreCase(d.getType())) @@ -442,7 +441,7 @@ public class JoinRender extends Renderer { .startsWith(Constants.MATERIALIZATION_ZIPPER_START)) .findFirst(); Optional endTimeOp = (Materialization.TimePartType.ZIPPER - .equals(left.getDataSource().getTimePartType()) ? left : right).getDataSource() + .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel() .getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME .equalsIgnoreCase(d.getType())) @@ -451,11 +450,11 @@ public class JoinRender extends Renderer { .findFirst(); if (startTimeOp.isPresent() && endTimeOp.isPresent()) { TableView zipper = Materialization.TimePartType.ZIPPER - .equals(left.getDataSource().getTimePartType()) ? left : right; + .equals(left.getDataModel().getTimePartType()) ? left : right; TableView partMetric = Materialization.TimePartType.ZIPPER - .equals(left.getDataSource().getTimePartType()) ? right : left; + .equals(left.getDataModel().getTimePartType()) ? right : left; Optional partTime = Materialization.TimePartType.ZIPPER - .equals(left.getDataSource().getTimePartType()) ? rightTime : leftTime; + .equals(left.getDataModel().getTimePartType()) ? rightTime : leftTime; startTime = zipper.getAlias() + "." + startTimeOp.get().getName(); endTime = zipper.getAlias() + "." + endTimeOp.get().getName(); dateTime = partMetric.getAlias() + "." 
+ partTime.get().getName(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java index 92dffeb9d..729022216 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java @@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; @@ -23,8 +23,8 @@ import java.util.List; public class OutputRender extends Renderer { @Override - public void render(MetricQueryParam metricCommand, List dataSources, - SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { + public void render(MetricQueryParam metricCommand, List dataModels, + SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { TableView selectDataSet = super.tableView; EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java index 140d601d9..f4d2876af 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java @@ -3,13 +3,13 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import 
com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode; @@ -21,7 +21,6 @@ import com.tencent.supersonic.headless.core.translator.calcite.sql.node.Semantic import lombok.extern.slf4j.Slf4j; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.validate.SqlValidatorScope; -import org.apache.calcite.util.Litmus; import org.springframework.util.CollectionUtils; import java.util.ArrayList; @@ -44,7 +43,7 @@ public class SourceRender extends Renderer { public static TableView renderOne(String alias, List fieldWheres, List reqMetrics, List reqDimensions, String queryWhere, - DataSource datasource, SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) + DataModel datasource, SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { TableView dataSet = new TableView(); @@ -107,8 +106,8 @@ public class SourceRender extends Renderer { - private static void buildDimension(String alias, String dimension, DataSource datasource, - SemanticSchema schema, boolean nonAgg, Map extendFields, + private static void buildDimension(String alias, String dimension, DataModel datasource, + S2SemanticSchema schema, boolean nonAgg, Map extendFields, TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception { List dimensionList = schema.getDimension().get(datasource.getName()); EngineType engineType = @@ -186,8 +185,8 @@ public class SourceRender extends Renderer { } private static List getWhereMeasure(List fields, List queryMetrics, - List queryDimensions, Map extendFields, DataSource datasource, - SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { + List queryDimensions, Map extendFields, DataModel datasource, + SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { Iterator iterator = fields.iterator(); List whereNode = new ArrayList<>(); EngineType engineType = @@ -229,8 +228,8 @@ public class SourceRender extends Renderer { private static void mergeWhere(List fields, TableView dataSet, TableView outputSet, List queryMetrics, List queryDimensions, - Map extendFields, DataSource datasource, SqlValidatorScope scope, - SemanticSchema schema, boolean nonAgg) throws Exception { + Map extendFields, DataModel datasource, SqlValidatorScope scope, + S2SemanticSchema schema, boolean nonAgg) throws Exception { List whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, extendFields, datasource, scope, schema, nonAgg); dataSet.getMeasure().addAll(whereNode); @@ -238,7 +237,7 @@ public class SourceRender extends Renderer { } public static void whereDimMetric(List fields, List queryMetrics, - List queryDimensions, DataSource datasource, SemanticSchema schema, + List queryDimensions, DataModel datasource, S2SemanticSchema schema, Set dimensions, Set metrics) { for (String field : fields) { if (queryDimensions.contains(field) || queryMetrics.contains(field)) { @@ -252,8 +251,8 @@ public class SourceRender extends Renderer { } } - private static void addField(String field, String oriField, DataSource datasource, - SemanticSchema schema, Set dimensions, Set metrics) { + private static void addField(String field, String oriField, DataModel datasource, + S2SemanticSchema schema, Set dimensions, Set metrics) { Optional dimension = datasource.getDimensions().stream() .filter(d -> 
d.getName().equalsIgnoreCase(field)).findFirst(); if (dimension.isPresent()) { @@ -293,7 +292,7 @@ public class SourceRender extends Renderer { } } - public static boolean isDimension(String name, DataSource datasource, SemanticSchema schema) { + public static boolean isDimension(String name, DataModel datasource, S2SemanticSchema schema) { Optional dimension = datasource.getDimensions().stream() .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst(); if (dimension.isPresent()) { @@ -314,13 +313,13 @@ public class SourceRender extends Renderer { return false; } - private static void addTimeDimension(DataSource dataSource, List queryDimension) { - if (Materialization.TimePartType.ZIPPER.equals(dataSource.getTimePartType())) { - Optional startTimeOp = dataSource.getDimensions().stream() + private static void addTimeDimension(DataModel dataModel, List queryDimension) { + if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) { + Optional startTimeOp = dataModel.getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)) .findFirst(); - Optional endTimeOp = dataSource.getDimensions().stream() + Optional endTimeOp = dataModel.getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)) .findFirst(); @@ -331,7 +330,7 @@ public class SourceRender extends Renderer { queryDimension.add(endTimeOp.get().getName()); } } else { - Optional timeOp = dataSource.getDimensions().stream() + Optional timeOp = dataModel.getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .findFirst(); if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) { @@ -340,8 +339,8 @@ public class SourceRender extends Renderer { } } - public void render(MetricQueryParam metricQueryParam, List dataSources, - SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception { + public void render(MetricQueryParam metricQueryParam, List dataModels, + SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { String queryWhere = metricQueryParam.getWhere(); Set whereFields = new HashSet<>(); List fieldWhere = new ArrayList<>(); @@ -352,15 +351,15 @@ public class SourceRender extends Renderer { FilterNode.getFilterField(sqlNode, whereFields); fieldWhere = whereFields.stream().collect(Collectors.toList()); } - if (dataSources.size() == 1) { - DataSource dataSource = dataSources.get(0); + if (dataModels.size() == 1) { + DataModel dataModel = dataModels.get(0); super.tableView = renderOne("", fieldWhere, metricQueryParam.getMetrics(), - metricQueryParam.getDimensions(), metricQueryParam.getWhere(), dataSource, - scope, schema, nonAgg); + metricQueryParam.getDimensions(), metricQueryParam.getWhere(), dataModel, scope, + schema, nonAgg); return; } JoinRender joinRender = new JoinRender(); - joinRender.render(metricQueryParam, dataSources, scope, schema, nonAgg); + joinRender.render(metricQueryParam, dataModels, scope, schema, nonAgg); super.tableView = joinRender.getTableView(); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java index 33102db14..b85682f31 100644 --- 
a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java @@ -5,7 +5,7 @@ import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.BeanUtils; @@ -60,7 +60,7 @@ public class ParserDefaultConverter implements QueryConverter { // support detail query if (queryParam.getQueryType().isNativeAggQuery() && CollectionUtils.isEmpty(metricQueryParam.getMetrics())) { - Map modelMap = queryStatement.getSemanticModel().getModelMap(); + Map modelMap = queryStatement.getSemanticModel().getModelMap(); for (Long modelId : modelMap.keySet()) { String modelBizName = modelMap.get(modelId).getName(); String internalMetricName = diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java index 179ead737..5052059bd 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java @@ -4,7 +4,7 @@ import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType; import com.tencent.supersonic.headless.api.pojo.response.ModelResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.core.pojo.QueryStatement; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; @@ -39,9 +39,9 @@ public class SqlVariableParseConverter implements QueryConverter { SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), modelResp.getModelDetail().getSqlVariables(), queryStatement.getQueryParam().getParams()); - DataSource dataSource = queryStatement.getSemanticModel().getDatasourceMap() + DataModel dataModel = queryStatement.getSemanticModel().getDatasourceMap() .get(modelResp.getBizName()); - dataSource.setSqlQuery(sqlParsed); + dataModel.setSqlQuery(sqlParsed); } } } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java index 628a3b79d..bb978fca9 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java @@ -8,7 +8,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import 
com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.TagResp; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataType; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DimensionTimeTypeParams; @@ -19,7 +19,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl; @@ -73,9 +73,9 @@ public class SemanticSchemaManager { getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName)); } if (!dataModelYamlTpls.isEmpty()) { - Map dataSourceMap = + Map dataSourceMap = dataModelYamlTpls.stream().map(SemanticSchemaManager::getDatasource).collect( - Collectors.toMap(DataSource::getName, item -> item, (k1, k2) -> k1)); + Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1)); semanticModel.setDatasourceMap(dataSourceMap); } if (!dimensionYamlTpls.isEmpty()) { @@ -107,8 +107,7 @@ public class SemanticSchemaManager { } if (Objects.nonNull(semanticModel.getDatasourceMap()) && !semanticModel.getDatasourceMap().isEmpty()) { - for (Map.Entry entry : semanticModel.getDatasourceMap() - .entrySet()) { + for (Map.Entry entry : semanticModel.getDatasourceMap().entrySet()) { List modelDimensions = new ArrayList<>(); if (!semanticModel.getDimensionMap().containsKey(entry.getKey())) { semanticModel.getDimensionMap().put(entry.getKey(), modelDimensions); @@ -178,8 +177,8 @@ public class SemanticSchemaManager { return getDimension(t); } - public static DataSource getDatasource(final DataModelYamlTpl d) { - DataSource datasource = DataSource.builder().id(d.getId()).sourceId(d.getSourceId()) + public static DataModel getDatasource(final DataModelYamlTpl d) { + DataModel datasource = DataModel.builder().id(d.getId()).sourceId(d.getSourceId()) .type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName()) .tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers())) .measures(getMeasureParams(d.getMeasures())) @@ -356,17 +355,17 @@ public class SemanticSchemaManager { return joinRelations; } - public static void update(SemanticSchema schema, List metric) throws Exception { + public static void update(S2SemanticSchema schema, List metric) throws Exception { if (schema != null) { updateMetric(metric, schema.getMetrics()); } } - public static void update(SemanticSchema schema, DataSource datasourceYamlTpl) + public static void update(S2SemanticSchema schema, DataModel datasourceYamlTpl) throws Exception { if (schema != null) { String dataSourceName = datasourceYamlTpl.getName(); - Optional> 
datasourceYamlTplMap = + Optional> datasourceYamlTplMap = schema.getDatasource().entrySet().stream() .filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst(); if (datasourceYamlTplMap.isPresent()) { @@ -377,7 +376,7 @@ public class SemanticSchemaManager { } } - public static void update(SemanticSchema schema, String datasourceBizName, + public static void update(S2SemanticSchema schema, String datasourceBizName, List dimensionYamlTpls) throws Exception { if (schema != null) { Optional>> datasourceYamlTplMap = schema diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java index 40cd3d5db..fa7896f3a 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java @@ -7,7 +7,7 @@ import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; @@ -27,9 +27,9 @@ import java.util.Map; @Slf4j class HeadlessParserServiceTest { - private static Map headlessSchemaMap = new HashMap<>(); + private static Map headlessSchemaMap = new HashMap<>(); - public static SqlParserResp parser(SemanticSchema semanticSchema, + public static SqlParserResp parser(S2SemanticSchema semanticSchema, MetricQueryParam metricQueryParam, boolean isAgg) { SqlParserResp sqlParser = new SqlParserResp(); try { @@ -40,7 +40,7 @@ class HeadlessParserServiceTest { AggPlanner aggBuilder = new AggPlanner(semanticSchema); QueryStatement queryStatement = new QueryStatement(); queryStatement.setMetricQueryParam(metricQueryParam); - aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg)); + aggBuilder.plan(queryStatement, AggOption.getAggregation(!isAgg)); EngineType engineType = EngineType .fromString(semanticSchema.getSemanticModel().getDatabase().getType()); sqlParser.setSql(aggBuilder.getSql(engineType)); @@ -122,7 +122,7 @@ class HeadlessParserServiceTest { identify.setType("primary"); identifies.add(identify); datasource.setIdentifiers(identifies); - SemanticSchema semanticSchema = SemanticSchema.newBuilder("1").build(); + S2SemanticSchema semanticSchema = S2SemanticSchema.newBuilder("1").build(); SemanticSchemaManager.update(semanticSchema, SemanticSchemaManager.getDatasource(datasource)); @@ -192,7 +192,7 @@ class HeadlessParserServiceTest { System.out.println(parser(semanticSchema, metricCommand2, true)); } - private static void addDepartment(SemanticSchema semanticSchema) { + private static void addDepartment(S2SemanticSchema semanticSchema) { DataModelYamlTpl datasource = new DataModelYamlTpl(); datasource.setName("user_department"); datasource.setSourceId(1L); diff --git 
a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java index 661262d50..e93e5bcb6 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java @@ -50,8 +50,7 @@ public class SchemaAuthTest extends BaseTest { @Test public void test_getVisibleModelList_alice() { User user = DataUtils.getUserAlice(); - List modelResps = - modelService.getModelListWithAuth(user, null, AuthType.VIEWER); + List modelResps = modelService.getModelListWithAuth(user, null, AuthType.VIEWER); List expectedModelBizNames = Lists.newArrayList("user_department", "singer"); Assertions.assertEquals(expectedModelBizNames, modelResps.stream().map(ModelResp::getBizName).collect(Collectors.toList())); From 159d91fd0fba91aeccdda81a8f377ac109bd83cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=A7=A3=28xie=29=E5=85=88=E7=94=9F=F0=9F=8C=BD?= Date: Mon, 18 Nov 2024 14:39:34 +0800 Subject: [PATCH 04/88] (fix) (chat) Memory management update does not take effect (#1912) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../supersonic/chat/server/memory/MemoryReviewTask.java | 9 +++++++ .../chat/server/service/impl/MemoryServiceImpl.java | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/memory/MemoryReviewTask.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/memory/MemoryReviewTask.java index aeb3c5baa..09889e489 100644 --- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/memory/MemoryReviewTask.java +++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/memory/MemoryReviewTask.java @@ -88,6 +88,15 @@ public class MemoryReviewTask { return; } + // skip review if the LLM has already evaluated this memory + if (Objects.nonNull(m.getLlmReviewRet())) { + // directly enable memory if the LLM determines it positive + if (MemoryReviewResult.POSITIVE.equals(m.getLlmReviewRet())) { + memoryService.enableMemory(m); + } + return; + } + String promptStr = createPromptString(m, chatApp.getPrompt()); Prompt prompt = PromptTemplate.from(promptStr).apply(Collections.EMPTY_MAP); diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/MemoryServiceImpl.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/MemoryServiceImpl.java index 2f1f89dd4..4ab32b8dc 100644 --- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/MemoryServiceImpl.java +++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/service/impl/MemoryServiceImpl.java @@ -49,10 +49,10 @@ public class MemoryServiceImpl implements MemoryService { @Override public void updateMemory(ChatMemoryUpdateReq chatMemoryUpdateReq, User user) { ChatMemoryDO chatMemoryDO = chatMemoryRepository.getMemory(chatMemoryUpdateReq.getId()); + boolean hadEnabled = MemoryStatus.ENABLED.equals(chatMemoryDO.getStatus()); chatMemoryDO.setUpdatedBy(user.getName()); chatMemoryDO.setUpdatedAt(new Date()); BeanMapper.mapper(chatMemoryUpdateReq, chatMemoryDO); - boolean hadEnabled = MemoryStatus.ENABLED.equals(chatMemoryDO.getStatus()); if (MemoryStatus.ENABLED.equals(chatMemoryUpdateReq.getStatus()) && !hadEnabled) { enableMemory(chatMemoryDO); } else if
(MemoryStatus.DISABLED.equals(chatMemoryUpdateReq.getStatus()) && hadEnabled) { From 8d63ed170a1c8485169a744d69e96b91c957c1ad Mon Sep 17 00:00:00 2001 From: tristanliu Date: Tue, 19 Nov 2024 14:28:29 +0800 Subject: [PATCH 05/88] [improvement][headless-fe] Added null-check conditions to the data formatting function. --- webapp/packages/chat-sdk/src/utils/utils.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/webapp/packages/chat-sdk/src/utils/utils.ts b/webapp/packages/chat-sdk/src/utils/utils.ts index de8494240..b514a0105 100644 --- a/webapp/packages/chat-sdk/src/utils/utils.ts +++ b/webapp/packages/chat-sdk/src/utils/utils.ts @@ -3,6 +3,9 @@ import { NumericUnit } from '../common/constants'; import { isString } from 'lodash'; export function formatByDecimalPlaces(value: number | string, decimalPlaces: number) { + if (value === null || value === undefined || value === '') { + return 0; + } if (isNaN(+value) || decimalPlaces < 0 || decimalPlaces > 100) { return value; } @@ -17,6 +20,9 @@ export function formatByDecimalPlaces(value: number | string, decimalPlaces: num } export function formatByThousandSeperator(value: number | string) { + if (value === null || value === undefined || value === '') { + return 0; + } if (isNaN(+value)) { return value; } From 62a4d60a0bb6cecfa4f1235542a45902f7802c05 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Tue, 19 Nov 2024 20:13:49 +0800 Subject: [PATCH 06/88] [improvement][headless]Clean code logic of headless core. --- .../headless/core/executor/JdbcExecutor.java | 2 +- .../headless/core/pojo/QueryStatement.java | 4 +- .../translator/DefaultSemanticTranslator.java | 8 ++-- .../calcite/CalciteQueryParser.java | 25 ++++++----- .../calcite/planner/AggPlanner.java | 2 +- .../{SemanticModel.java => Ontology.java} | 2 +- .../calcite/schema/S2SemanticSchema.java | 28 ++++++------- .../converter/CalculateAggConverter.java | 2 +- .../converter/DefaultDimValueConverter.java | 2 +- .../converter/ParserDefaultConverter.java | 2 +- .../converter/SqlVariableParseConverter.java | 4 +- .../service/impl/S2SemanticLayerService.java | 39 ++++++++---------- .../server/manager/SemanticSchemaManager.java | 41 +++++++++---------- .../server/utils/QueryReqConverter.java | 36 ++++++++-------- 14 files changed, 96 insertions(+), 101 deletions(-) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/{SemanticModel.java => Ontology.java} (97%) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/JdbcExecutor.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/JdbcExecutor.java index 6c4e8ed85..a218fa0fb 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/JdbcExecutor.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/JdbcExecutor.java @@ -38,7 +38,7 @@ public class JdbcExecutor implements QueryExecutor { SqlUtils sqlUtils = ContextUtils.getBean(SqlUtils.class); String sql = StringUtils.normalizeSpace(queryStatement.getSql()); log.info("executing SQL: {}", sql); - Database database = queryStatement.getSemanticModel().getDatabase(); + Database database = queryStatement.getOntology().getDatabase(); SemanticQueryResp queryResultWithColumns = new SemanticQueryResp(); try { SqlUtils sqlUtil = sqlUtils.init(database); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java index 28e0c3c8d..33266cc7a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java @@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.pojo; import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import lombok.Data; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.ImmutablePair; @@ -29,7 +29,7 @@ public class QueryStatement { private String dataSetAlias; private String dataSetSimplifySql; private Boolean enableLimitWrapper = false; - private SemanticModel semanticModel; + private Ontology ontology; private SemanticSchemaResp semanticSchemaResp; private Integer limit = 1000; private Boolean isTranslated = false; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java index 988817643..0eb64a7ee 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java @@ -10,7 +10,7 @@ import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.translator.converter.QueryConverter; import com.tencent.supersonic.headless.core.utils.ComponentFactory; import lombok.extern.slf4j.Slf4j; @@ -81,8 +81,8 @@ public class DefaultSemanticTranslator implements SemanticTranslator { private QueryStatement doParse(DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) { log.info("parse dataSetQuery [{}] ", dataSetQueryParam); - SemanticModel semanticModel = queryStatement.getSemanticModel(); - EngineType engineType = EngineType.fromString(semanticModel.getDatabase().getType()); + Ontology ontology = queryStatement.getOntology(); + EngineType engineType = EngineType.fromString(ontology.getDatabase().getType()); try { if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) { List tables = new ArrayList<>(); @@ -158,7 +158,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator { tableSql.setMinMaxTime(queryStatement.getMinMaxTime()); tableSql.setEnableOptimize(queryStatement.getEnableOptimize()); tableSql.setDataSetId(queryStatement.getDataSetId()); - tableSql.setSemanticModel(queryStatement.getSemanticModel()); + tableSql.setOntology(queryStatement.getOntology()); if (isSingleMetricTable) { tableSql.setDataSetSql(dataSetQueryParam.getSql()); tableSql.setDataSetAlias(metricTable.getAlias()); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java index 128106b2a..5b9bc18c5 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java @@ -7,7 +7,7 @@ import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.QueryParser; import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.translator.calcite.schema.RuntimeOptions; import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import lombok.extern.slf4j.Slf4j; @@ -25,16 +25,16 @@ public class CalciteQueryParser implements QueryParser { @Override public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception { MetricQueryParam metricReq = queryStatement.getMetricQueryParam(); - SemanticModel semanticModel = queryStatement.getSemanticModel(); - if (semanticModel == null) { + Ontology ontology = queryStatement.getOntology(); + if (ontology == null) { queryStatement.setErrMsg("semanticSchema not found"); return; } queryStatement.setMetricQueryParam(metricReq); - S2SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement); + S2SemanticSchema semanticSchema = getSemanticSchema(ontology, queryStatement); AggPlanner aggPlanner = new AggPlanner(semanticSchema); aggPlanner.plan(queryStatement, isAgg); - EngineType engineType = EngineType.fromString(semanticModel.getDatabase().getType()); + EngineType engineType = EngineType.fromString(ontology.getDatabase().getType()); queryStatement.setSql(aggPlanner.getSql(engineType)); if (Objects.nonNull(queryStatement.getEnableOptimize()) && queryStatement.getEnableOptimize() @@ -52,15 +52,14 @@ public class CalciteQueryParser implements QueryParser { } } - private S2SemanticSchema getSemanticSchema(SemanticModel semanticModel, - QueryStatement queryStatement) { + private S2SemanticSchema getSemanticSchema(Ontology ontology, QueryStatement queryStatement) { S2SemanticSchema semanticSchema = - S2SemanticSchema.newBuilder(semanticModel.getSchemaKey()).build(); - semanticSchema.setSemanticModel(semanticModel); - semanticSchema.setDatasource(semanticModel.getDatasourceMap()); - semanticSchema.setDimension(semanticModel.getDimensionMap()); - semanticSchema.setMetric(semanticModel.getMetrics()); - semanticSchema.setJoinRelations(semanticModel.getJoinRelations()); + S2SemanticSchema.newBuilder(ontology.getSchemaKey()).build(); + semanticSchema.setSemanticModel(ontology); + semanticSchema.setDatasource(ontology.getDatasourceMap()); + semanticSchema.setDimension(ontology.getDimensionMap()); + semanticSchema.setMetric(ontology.getMetrics()); + semanticSchema.setJoinRelations(ontology.getJoinRelations()); semanticSchema.setRuntimeOptions( RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime()) .enableOptimize(queryStatement.getEnableOptimize()).build()); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java index 4b1036f86..12aa596da 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java @@ -109,7 +109,7 @@ public class AggPlanner implements Planner { // build a parse Node parse(); // optimizer - Database database = queryStatement.getSemanticModel().getDatabase(); + Database database = queryStatement.getOntology().getDatabase(); EngineType engineType = EngineType.fromString(database.getType()); optimize(engineType); } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticModel.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java similarity index 97% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticModel.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java index af742891e..3a0e8e489 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticModel.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java @@ -11,7 +11,7 @@ import java.util.Map; import java.util.stream.Collectors; @Data -public class SemanticModel { +public class Ontology { private String schemaKey; private List metrics = new ArrayList<>(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java index b53879817..8eff0c8dd 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java @@ -5,7 +5,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import org.apache.calcite.schema.Schema; import org.apache.calcite.schema.SchemaVersion; import org.apache.calcite.schema.Table; @@ -21,7 +21,7 @@ public class S2SemanticSchema extends AbstractSchema { private final Map tableMap; - private SemanticModel semanticModel = new SemanticModel(); + private Ontology ontology = new Ontology(); private List joinRelations; @@ -40,12 +40,12 @@ public class S2SemanticSchema extends AbstractSchema { return schemaKey; } - public void setSemanticModel(SemanticModel semanticModel) { - this.semanticModel = semanticModel; + public void setSemanticModel(Ontology ontology) { + this.ontology = ontology; } - public SemanticModel getSemanticModel() { - return semanticModel; + public Ontology getSemanticModel() { + return ontology; } @Override @@ -59,35 +59,35 @@ public class S2SemanticSchema extends AbstractSchema { } public Map 
getDatasource() { - return semanticModel.getDatasourceMap(); + return ontology.getDatasourceMap(); } public void setDatasource(Map datasource) { - semanticModel.setDatasourceMap(datasource); + ontology.setDatasourceMap(datasource); } public Map> getDimension() { - return semanticModel.getDimensionMap(); + return ontology.getDimensionMap(); } public void setDimension(Map> dimensions) { - semanticModel.setDimensionMap(dimensions); + ontology.setDimensionMap(dimensions); } public List getMetrics() { - return semanticModel.getMetrics(); + return ontology.getMetrics(); } public void setMetric(List metric) { - semanticModel.setMetrics(metric); + ontology.setMetrics(metric); } public void setMaterializationList(List materializationList) { - semanticModel.setMaterializationList(materializationList); + ontology.setMaterializationList(materializationList); } public List getMaterializationList() { - return semanticModel.getMaterializationList(); + return ontology.getMaterializationList(); } public void setJoinRelations(List joinRelations) { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/CalculateAggConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/CalculateAggConverter.java index 252dd3184..e0287e427 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/CalculateAggConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/CalculateAggConverter.java @@ -97,7 +97,7 @@ public class CalculateAggConverter implements QueryConverter { @Override public void convert(QueryStatement queryStatement) throws Exception { - Database database = queryStatement.getSemanticModel().getDatabase(); + Database database = queryStatement.getOntology().getDatabase(); DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement, EngineType.fromString(database.getType().toUpperCase()), database.getVersion()); queryStatement.setDataSetQueryParam(dataSetQueryParam); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java index b06e363f4..54b24dd84 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java @@ -34,7 +34,7 @@ public class DefaultDimValueConverter implements QueryConverter { @Override public void convert(QueryStatement queryStatement) { - List dimensions = queryStatement.getSemanticModel().getDimensions().stream() + List dimensions = queryStatement.getOntology().getDimensions().stream() .filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues())) .collect(Collectors.toList()); if (CollectionUtils.isEmpty(dimensions)) { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java index b85682f31..12c8dd722 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java +++ 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java @@ -60,7 +60,7 @@ public class ParserDefaultConverter implements QueryConverter { // support detail query if (queryParam.getQueryType().isNativeAggQuery() && CollectionUtils.isEmpty(metricQueryParam.getMetrics())) { - Map modelMap = queryStatement.getSemanticModel().getModelMap(); + Map modelMap = queryStatement.getOntology().getModelMap(); for (Long modelId : modelMap.keySet()) { String modelBizName = modelMap.get(modelId).getName(); String internalMetricName = diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java index 5052059bd..a73f4ecf5 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java @@ -39,8 +39,8 @@ public class SqlVariableParseConverter implements QueryConverter { SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), modelResp.getModelDetail().getSqlVariables(), queryStatement.getQueryParam().getParams()); - DataModel dataModel = queryStatement.getSemanticModel().getDatasourceMap() - .get(modelResp.getBizName()); + DataModel dataModel = + queryStatement.getOntology().getDatasourceMap().get(modelResp.getBizName()); dataModel.setSqlQuery(sqlParsed); } } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java index f1b43a77d..f6bcf80ce 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java @@ -34,7 +34,7 @@ import com.tencent.supersonic.headless.core.cache.QueryCache; import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.SemanticTranslator; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.server.annotation.S2DataPermission; import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService; @@ -51,6 +51,7 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.BeanUtils; import org.springframework.stereotype.Service; import java.util.ArrayList; @@ -271,23 +272,6 @@ public class S2SemanticLayerService implements SemanticLayerService { return metricService.getMetrics(metaFilter); } - private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) - throws Exception { - // If dataSetId or DataSetName is empty, parse dataSetId from the SQL - if (querySqlReq.needGetDataSetId()) { - Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user); 
- querySqlReq.setDataSetId(dataSetId); - } - SchemaFilterReq filter = buildSchemaFilterReq(querySqlReq); - SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter); - QueryStatement queryStatement = queryReqConverter.convert(querySqlReq, semanticSchemaResp); - queryStatement.setModelIds(querySqlReq.getModelIds()); - queryStatement.setEnableOptimize(queryUtils.enableOptimize()); - queryStatement.setSemanticSchemaResp(semanticSchemaResp); - queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp)); - return queryStatement; - } - private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user) throws Exception { QueryStatement queryStatement = null; @@ -310,18 +294,29 @@ public class S2SemanticLayerService implements SemanticLayerService { return queryStatement; } + private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) { + // If dataSetId or DataSetName is empty, parse dataSetId from the SQL + if (querySqlReq.needGetDataSetId()) { + Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user); + querySqlReq.setDataSetId(dataSetId); + } + SchemaFilterReq filter = buildSchemaFilterReq(querySqlReq); + SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter); + return queryReqConverter.buildQueryStatement(querySqlReq, semanticSchemaResp); + } + private QueryStatement buildStructQueryStatement(QueryStructReq queryStructReq) { SchemaFilterReq filter = buildSchemaFilterReq(queryStructReq); SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter); QueryStatement queryStatement = new QueryStatement(); QueryParam queryParam = new QueryParam(); - queryReqConverter.convert(queryStructReq, queryParam); + BeanUtils.copyProperties(queryStructReq, queryParam); queryStatement.setQueryParam(queryParam); queryStatement.setIsS2SQL(false); queryStatement.setEnableOptimize(queryUtils.enableOptimize()); queryStatement.setDataSetId(queryStructReq.getDataSetId()); queryStatement.setSemanticSchemaResp(semanticSchemaResp); - queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp)); + queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp)); return queryStatement; } @@ -330,9 +325,9 @@ public class S2SemanticLayerService implements SemanticLayerService { List sqlParsers = new ArrayList<>(); for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) { QueryStatement queryStatement = buildQueryStatement(queryStructReq, user); - SemanticModel semanticModel = queryStatement.getSemanticModel(); + Ontology ontology = queryStatement.getOntology(); queryStatement.setModelIds(queryStructReq.getModelIds()); - queryStatement.setSemanticModel(semanticModel); + queryStatement.setOntology(ontology); queryStatement.setEnableOptimize(queryUtils.enableOptimize()); semanticTranslator.translate(queryStatement); sqlParsers.add(queryStatement); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java index bb978fca9..b4a8cc25a 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java @@ -18,7 +18,7 @@ import 
com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materializa import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; @@ -57,9 +57,9 @@ public class SemanticSchemaManager { this.schemaService = schemaService; } - public SemanticModel getSemanticModel(SemanticSchemaResp semanticSchemaResp) { - SemanticModel semanticModel = new SemanticModel(); - semanticModel.setSchemaKey(semanticSchemaResp.getSchemaKey()); + public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) { + Ontology ontology = new Ontology(); + ontology.setSchemaKey(semanticSchemaResp.getSchemaKey()); Map> dimensionYamlTpls = new HashMap<>(); List dataModelYamlTpls = new ArrayList<>(); List metricYamlTpls = new ArrayList<>(); @@ -67,36 +67,35 @@ public class SemanticSchemaManager { schemaService.getSchemaYamlTpl(semanticSchemaResp, dimensionYamlTpls, dataModelYamlTpls, metricYamlTpls, modelIdName); DatabaseResp databaseResp = semanticSchemaResp.getDatabaseResp(); - semanticModel.setDatabase(DatabaseConverter.convert(databaseResp)); + ontology.setDatabase(DatabaseConverter.convert(databaseResp)); if (!CollectionUtils.isEmpty(semanticSchemaResp.getModelRelas())) { - semanticModel.setJoinRelations( + ontology.setJoinRelations( getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName)); } if (!dataModelYamlTpls.isEmpty()) { Map dataSourceMap = dataModelYamlTpls.stream().map(SemanticSchemaManager::getDatasource).collect( Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1)); - semanticModel.setDatasourceMap(dataSourceMap); + ontology.setDatasourceMap(dataSourceMap); } if (!dimensionYamlTpls.isEmpty()) { Map> dimensionMap = new HashMap<>(); for (Map.Entry> entry : dimensionYamlTpls.entrySet()) { dimensionMap.put(entry.getKey(), getDimensions(entry.getValue())); } - semanticModel.setDimensionMap(dimensionMap); + ontology.setDimensionMap(dimensionMap); } if (!metricYamlTpls.isEmpty()) { - semanticModel.setMetrics(getMetrics(metricYamlTpls)); + ontology.setMetrics(getMetrics(metricYamlTpls)); } - return semanticModel; + return ontology; } - public SemanticModel getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) - throws Exception { + public Ontology getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) throws Exception { if (CollectionUtils.isEmpty(semanticSchemaResp.getTags())) { throw new Exception("semanticSchemaResp tag is empty"); } - SemanticModel semanticModel = getSemanticModel(semanticSchemaResp); + Ontology ontology = buildOntology(semanticSchemaResp); // Map> dimensions = new HashMap<>(); Map> tagMap = new HashMap<>(); for (TagResp tagResp : semanticSchemaResp.getTags()) { @@ -105,24 +104,24 @@ public class SemanticSchemaManager { } tagMap.get(tagResp.getModelId()).add(tagResp); } - if (Objects.nonNull(semanticModel.getDatasourceMap()) - && !semanticModel.getDatasourceMap().isEmpty()) { - for (Map.Entry entry : semanticModel.getDatasourceMap().entrySet()) { + if 
(Objects.nonNull(ontology.getDatasourceMap()) + && !ontology.getDatasourceMap().isEmpty()) { + for (Map.Entry entry : ontology.getDatasourceMap().entrySet()) { List modelDimensions = new ArrayList<>(); - if (!semanticModel.getDimensionMap().containsKey(entry.getKey())) { - semanticModel.getDimensionMap().put(entry.getKey(), modelDimensions); + if (!ontology.getDimensionMap().containsKey(entry.getKey())) { + ontology.getDimensionMap().put(entry.getKey(), modelDimensions); } else { - modelDimensions = semanticModel.getDimensionMap().get(entry.getKey()); + modelDimensions = ontology.getDimensionMap().get(entry.getKey()); } if (tagMap.containsKey(entry.getValue().getId())) { for (TagResp tagResp : tagMap.get(entry.getValue().getId())) { - addTagModel(tagResp, modelDimensions, semanticModel.getMetrics()); + addTagModel(tagResp, modelDimensions, ontology.getMetrics()); } } } } - return semanticModel; + return ontology; } private void addTagModel(TagResp tagResp, List modelDimensions, diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java index 3236356f5..677743985 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java @@ -29,6 +29,7 @@ import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory; import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; +import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; @@ -58,8 +59,14 @@ public class QueryReqConverter { @Autowired private SqlGenerateUtils sqlGenerateUtils; - public QueryStatement convert(QuerySqlReq querySQLReq, SemanticSchemaResp semanticSchemaResp) - throws Exception { + @Autowired + private QueryUtils queryUtils; + + @Autowired + private SemanticSchemaManager semanticSchemaManager; + + public QueryStatement buildQueryStatement(QuerySqlReq querySQLReq, + SemanticSchemaResp semanticSchemaResp) { if (semanticSchemaResp == null) { return new QueryStatement(); @@ -87,17 +94,14 @@ public class QueryReqConverter { List metrics = metricSchemas.stream().map(m -> m.getBizName()).collect(Collectors.toList()); QueryStructReq queryStructReq = new QueryStructReq(); + MetricTable metricTable = new MetricTable(); - metricTable.setMetrics(metrics); - + metricTable.getMetrics().addAll(metrics); Set dimensions = getDimensions(semanticSchemaResp, allFields); - - metricTable.setDimensions(new ArrayList<>(dimensions)); - + metricTable.getDimensions().addAll(dimensions); metricTable.setAlias(tableName.toLowerCase()); // if metric empty , fill model default if (CollectionUtils.isEmpty(metricTable.getMetrics())) { - metricTable.setMetrics(new ArrayList<>()); metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( getDefaultModel(semanticSchemaResp, metricTable.getDimensions()))); } else { @@ -122,14 +126,15 @@ public class QueryReqConverter { } // 7. 
do deriveMetric generateDerivedMetric(semanticSchemaResp, aggOption, result); - // 8.physicalSql by ParseSqlReq + // 8.physicalSql by ParseSqlReq queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(querySQLReq.getSql())); queryStructReq.setDataSetId(querySQLReq.getDataSetId()); queryStructReq.setQueryType(getQueryType(aggOption)); log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq); QueryParam queryParam = new QueryParam(); - convert(queryStructReq, queryParam); + BeanUtils.copyProperties(queryStructReq, queryParam); + QueryStatement queryStatement = new QueryStatement(); queryStatement.setQueryParam(queryParam); queryStatement.setDataSetQueryParam(result); @@ -137,17 +142,14 @@ public class QueryReqConverter { queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq)); queryStatement.setDataSetId(querySQLReq.getDataSetId()); queryStatement.setLimit(querySQLReq.getLimit()); + queryStatement.setModelIds(querySQLReq.getModelIds()); + queryStatement.setEnableOptimize(queryUtils.enableOptimize()); + queryStatement.setSemanticSchemaResp(semanticSchemaResp); + queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp)); return queryStatement; } - public void convert(QueryStructReq queryStructReq, QueryParam queryParam) { - BeanUtils.copyProperties(queryStructReq, queryParam); - queryParam.setOrders(queryStructReq.getOrders()); - queryParam.setMetrics(queryStructReq.getMetrics()); - queryParam.setGroups(queryStructReq.getGroups()); - } - private AggOption getAggOption(QuerySqlReq databaseReq, List metricSchemas) { String sql = databaseReq.getSql(); if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql) From d7586a5d3b3c4dcd1b2f977d2388a2da088996ea Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Wed, 20 Nov 2024 00:52:59 +0800 Subject: [PATCH 07/88] [improvement][headless]Clean code logic of headless translator. 
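Consolidate the S2SQL preprocessing that previously lived in QueryReqConverter
(name-to-bizName conversion, function rewriting, MetricTable construction,
aggregation-option detection and derived-metric expansion) into
DefaultSemanticTranslator, and replace the Planner/AggPlanner/S2SemanticSchema
classes with the leaner SqlBuilder/S2CalciteSchema pair under the calcite.sql
package.

After this patch the translation entry points read roughly as sketched below
(an illustrative simplification of the classes changed here, with logging and
error handling omitted):

    // DefaultSemanticTranslator: one pipeline instead of converter + parser glue
    public void translate(QueryStatement queryStatement) {
        if (queryStatement.isTranslated()) {
            return;                     // already translated, nothing to do
        }
        preprocess(queryStatement);     // S2SQL rewriting formerly done by QueryReqConverter
        parse(queryStatement);          // run QueryConverters, then the calcite parser
        optimize(queryStatement);       // apply registered QueryOptimizers
    }

    // CalciteQueryParser: build the calcite schema directly from the ontology
    public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception {
        Ontology ontology = queryStatement.getOntology();
        if (ontology == null) {
            queryStatement.setErrMsg("No ontology could be found");
            return;
        }
        S2CalciteSchema schema = S2CalciteSchema.builder()
                .schemaKey("DATASET_" + queryStatement.getDataSetId())
                .ontology(ontology)
                .runtimeOptions(RuntimeOptions.builder()
                        .minMaxTime(queryStatement.getMinMaxTime())
                        .enableOptimize(queryStatement.getEnableOptimize())
                        .build())
                .build();
        new SqlBuilder(schema).build(queryStatement, isAgg);
    }

QueryReqConverter is deleted outright; its QueryStructReq-to-QueryParam mapping
is handled with BeanUtils.copyProperties inside the new preprocess step.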
--- .../api/pojo/response/SemanticSchemaResp.java | 10 - .../rule/detail/DetailSemanticQuery.java | 6 - .../rule/metric/MetricSemanticQuery.java | 6 - .../core/executor/AbstractAccelerator.java | 10 +- .../translator/DefaultSemanticTranslator.java | 358 ++++++++++++++++- .../calcite/CalciteQueryParser.java | 66 +-- .../translator/calcite/planner/Planner.java | 17 - .../translator/calcite/s2sql/DataModel.java | 2 +- .../translator/calcite/s2sql/Dimension.java | 1 - .../core/translator/calcite/s2sql/Metric.java | 1 - .../translator/calcite/s2sql/Ontology.java | 1 - .../{schema => s2sql}/SemanticItem.java | 5 +- .../calcite/schema/S2SemanticSchema.java | 137 ------- .../FilterToGroupScanRule.java | 18 +- .../translator/calcite/sql/Optimization.java | 8 - .../{schema => sql}/RuntimeOptions.java | 2 +- .../calcite/sql/S2CalciteSchema.java | 48 +++ .../S2CalciteTable.java} | 10 +- .../{schema => sql}/SchemaBuilder.java | 16 +- .../AggPlanner.java => sql/SqlBuilder.java} | 119 +++--- ...DataSourceNode.java => DataModelNode.java} | 44 +- .../translator/calcite/sql/node/JoinNode.java | 13 - .../{extend => }/LateralViewExplodeNode.java | 3 +- .../calcite/sql/node/MetricNode.java | 4 +- .../calcite/sql/node/SemanticNode.java | 8 +- .../calcite/sql/render/FilterRender.java | 8 +- .../calcite/sql/render/JoinRender.java | 39 +- .../calcite/sql/render/OutputRender.java | 8 +- .../calcite/sql/{ => render}/Renderer.java | 11 +- .../calcite/sql/render/SourceRender.java | 32 +- .../service/impl/S2SemanticLayerService.java | 140 +++---- .../server/manager/SemanticSchemaManager.java | 11 +- .../server/utils/MetricDrillDownChecker.java | 2 +- .../server/utils/QueryReqConverter.java | 378 ------------------ .../headless/server/utils/QueryUtils.java | 8 +- .../calcite/HeadlessParserServiceTest.java | 23 +- .../tencent/supersonic/demo/S2VisitsDemo.java | 4 +- 37 files changed, 651 insertions(+), 926 deletions(-) delete mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/{schema => s2sql}/SemanticItem.java (66%) delete mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/{optimizer => }/FilterToGroupScanRule.java (87%) delete mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/{schema => sql}/RuntimeOptions.java (97%) create mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/{schema/DataSourceTable.java => sql/S2CalciteTable.java} (92%) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/{schema => sql}/SchemaBuilder.java (85%) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/{planner/AggPlanner.java => sql/SqlBuilder.java} (59%) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/{DataSourceNode.java => DataModelNode.java} (92%) delete mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/JoinNode.java rename 
headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/{extend => }/LateralViewExplodeNode.java (96%) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/{ => render}/Renderer.java (94%) delete mode 100644 headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/SemanticSchemaResp.java b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/SemanticSchemaResp.java index fd7c39836..767b7dfdf 100644 --- a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/SemanticSchemaResp.java +++ b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/SemanticSchemaResp.java @@ -7,14 +7,11 @@ import com.tencent.supersonic.headless.api.pojo.enums.SchemaType; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import org.apache.commons.lang3.StringUtils; import java.util.HashSet; import java.util.List; import java.util.Set; -import static com.tencent.supersonic.common.pojo.Constants.UNDERLINE; - @Data @AllArgsConstructor @NoArgsConstructor @@ -32,13 +29,6 @@ public class SemanticSchemaResp { private DatabaseResp databaseResp; private QueryType queryType; - public String getSchemaKey() { - if (dataSetId == null) { - return String.format("%s_%s", schemaType, StringUtils.join(modelIds, UNDERLINE)); - } - return String.format("%s_%s", schemaType, dataSetId); - } - public MetricSchemaResp getMetric(String bizName) { return metrics.stream().filter(metric -> bizName.equalsIgnoreCase(metric.getBizName())) .findFirst().orElse(null); diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/detail/DetailSemanticQuery.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/detail/DetailSemanticQuery.java index 4ad32b274..48d191015 100644 --- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/detail/DetailSemanticQuery.java +++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/detail/DetailSemanticQuery.java @@ -21,12 +21,6 @@ public abstract class DetailSemanticQuery extends RuleSemanticQuery { super(); } - @Override - public List match(List candidateElementMatches, - ChatQueryContext queryCtx) { - return super.match(candidateElementMatches, queryCtx); - } - @Override public void fillParseInfo(ChatQueryContext chatQueryContext, Long dataSetId) { super.fillParseInfo(chatQueryContext, dataSetId); diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/metric/MetricSemanticQuery.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/metric/MetricSemanticQuery.java index be4767cab..a4d787b9e 100644 --- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/metric/MetricSemanticQuery.java +++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/rule/metric/MetricSemanticQuery.java @@ -25,12 +25,6 @@ public abstract class MetricSemanticQuery extends RuleSemanticQuery { queryMatcher.addOption(METRIC, REQUIRED, AT_LEAST, 1); } - @Override - public List match(List candidateElementMatches, - ChatQueryContext queryCtx) { - return super.match(candidateElementMatches, queryCtx); - } - @Override public void fillParseInfo(ChatQueryContext chatQueryContext, Long dataSetId) { 
super.fillParseInfo(chatQueryContext, dataSetId); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/AbstractAccelerator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/AbstractAccelerator.java index fd26da4d2..282a60a35 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/AbstractAccelerator.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/executor/AbstractAccelerator.java @@ -4,9 +4,9 @@ import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.headless.core.pojo.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.TimeRange; -import com.tencent.supersonic.headless.core.translator.calcite.schema.DataSourceTable; -import com.tencent.supersonic.headless.core.translator.calcite.schema.DataSourceTable.Builder; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteTable; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteTable.Builder; +import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.adapter.enumerable.EnumerableRules; import org.apache.calcite.config.CalciteConnectionConfigImpl; @@ -156,14 +156,14 @@ public abstract class AbstractAccelerator implements QueryAccelerator { String[] dbTable = materialization.getName().split("\\."); String tb = dbTable[1].toLowerCase(); String db = dbTable[0].toLowerCase(); - Builder builder = DataSourceTable.newBuilder(tb); + Builder builder = S2CalciteTable.newBuilder(tb); for (String f : materialization.getColumns()) { builder.addField(f, SqlTypeName.VARCHAR); } if (StringUtils.isNotBlank(materialization.getPartitionName())) { builder.addField(materialization.getPartitionName(), SqlTypeName.VARCHAR); } - DataSourceTable srcTable = builder.withRowCount(1L).build(); + S2CalciteTable srcTable = builder.withRowCount(1L).build(); if (Objects.nonNull(db) && !db.isEmpty()) { SchemaPlus schemaPlus = dataSetSchema.plus().getSubSchema(db); if (Objects.isNull(schemaPlus)) { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java index 0eb64a7ee..a1f7a5f48 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java @@ -1,34 +1,72 @@ package com.tencent.supersonic.headless.core.translator; import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; +import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper; +import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper; +import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; +import com.tencent.supersonic.common.pojo.Aggregator; +import com.tencent.supersonic.common.pojo.Constants; +import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum; import com.tencent.supersonic.common.pojo.enums.EngineType; +import com.tencent.supersonic.common.pojo.enums.QueryType; +import 
com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import com.tencent.supersonic.common.util.StringUtil; +import com.tencent.supersonic.headless.api.pojo.Measure; import com.tencent.supersonic.headless.api.pojo.MetricTable; import com.tencent.supersonic.headless.api.pojo.QueryParam; +import com.tencent.supersonic.headless.api.pojo.SchemaItem; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; +import com.tencent.supersonic.headless.api.pojo.enums.MetricType; +import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; +import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; +import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp; +import com.tencent.supersonic.headless.api.pojo.response.MetricResp; +import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; +import com.tencent.supersonic.headless.api.pojo.response.ModelResp; +import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; +import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor; +import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory; import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.translator.converter.QueryConverter; import com.tencent.supersonic.headless.core.utils.ComponentFactory; +import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.springframework.beans.BeanUtils; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.util.CollectionUtils; import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; @Component @Slf4j public class DefaultSemanticTranslator implements SemanticTranslator { + @Autowired + private SqlGenerateUtils sqlGenerateUtils; + public void translate(QueryStatement queryStatement) { + if (queryStatement.isTranslated()) { + return; + } + try { + preprocess(queryStatement); parse(queryStatement); optimize(queryStatement); } catch (Exception e) { @@ -36,12 +74,6 @@ public class DefaultSemanticTranslator implements SemanticTranslator { } } - public void optimize(QueryStatement queryStatement) { - for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) { - queryOptimizer.rewrite(queryStatement); - } - } - private void parse(QueryStatement queryStatement) throws Exception { QueryParam queryParam = queryStatement.getQueryParam(); if (Objects.isNull(queryStatement.getDataSetQueryParam())) { @@ -50,6 +82,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator { if (Objects.isNull(queryStatement.getMetricQueryParam())) { queryStatement.setMetricQueryParam(new MetricQueryParam()); } + log.debug("SemanticConverter before [{}]", queryParam); for (QueryConverter headlessConverter : ComponentFactory.getQueryConverters()) { if (headlessConverter.accept(queryStatement)) { @@ -59,6 +92,7 @@ public class DefaultSemanticTranslator implements 
SemanticTranslator { } log.debug("SemanticConverter after {} {} {}", queryParam, queryStatement.getDataSetQueryParam(), queryStatement.getMetricQueryParam()); + if (!queryStatement.getDataSetQueryParam().getSql().isEmpty()) { doParse(queryStatement.getDataSetQueryParam(), queryStatement); } else { @@ -67,6 +101,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator { doParse(queryStatement, AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery())); } + if (StringUtils.isEmpty(queryStatement.getSql())) { throw new RuntimeException("parse Exception: " + queryStatement.getErrMsg()); } @@ -147,14 +182,15 @@ public class DefaultSemanticTranslator implements SemanticTranslator { private QueryStatement parserSql(MetricTable metricTable, Boolean isSingleMetricTable, DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) throws Exception { - MetricQueryParam metricReq = new MetricQueryParam(); - metricReq.setMetrics(metricTable.getMetrics()); - metricReq.setDimensions(metricTable.getDimensions()); - metricReq.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere())); - metricReq.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption())); + MetricQueryParam metricQueryParam = new MetricQueryParam(); + metricQueryParam.setMetrics(metricTable.getMetrics()); + metricQueryParam.setDimensions(metricTable.getDimensions()); + metricQueryParam.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere())); + metricQueryParam.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption())); + QueryStatement tableSql = new QueryStatement(); tableSql.setIsS2SQL(false); - tableSql.setMetricQueryParam(metricReq); + tableSql.setMetricQueryParam(metricQueryParam); tableSql.setMinMaxTime(queryStatement.getMinMaxTime()); tableSql.setEnableOptimize(queryStatement.getEnableOptimize()); tableSql.setDataSetId(queryStatement.getDataSetId()); @@ -170,4 +206,302 @@ public class DefaultSemanticTranslator implements SemanticTranslator { } return tableSql; } + + private void optimize(QueryStatement queryStatement) { + for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) { + queryOptimizer.rewrite(queryStatement); + } + } + + private void preprocess(QueryStatement queryStatement) { + if (StringUtils.isBlank(queryStatement.getSql())) { + return; + } + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + + convertNameToBizName(queryStatement); + rewriteFunction(queryStatement); + queryStatement.setSql(SqlRemoveHelper.removeUnderscores(queryStatement.getSql())); + + String tableName = SqlSelectHelper.getTableName(queryStatement.getSql()); + if (StringUtils.isEmpty(tableName)) { + return; + } + // correct order item is same as agg alias + String reqSql = queryStatement.getSql(); + queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(queryStatement.getSql())); + log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql()); + // 5.build MetricTables + List allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql()); + List metricSchemas = getMetrics(semanticSchemaResp, allFields); + List metrics = + metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList()); + Set dimensions = getDimensions(semanticSchemaResp, allFields); + QueryStructReq queryStructReq = new QueryStructReq(); + + MetricTable metricTable = new MetricTable(); + metricTable.getMetrics().addAll(metrics); + metricTable.getDimensions().addAll(dimensions); + metricTable.setAlias(tableName.toLowerCase()); + // 
if metric empty , fill model default + if (CollectionUtils.isEmpty(metricTable.getMetrics())) { + metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( + getDefaultModel(semanticSchemaResp, metricTable.getDimensions()))); + } else { + queryStructReq.getAggregators() + .addAll(metricTable.getMetrics().stream() + .map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN)) + .collect(Collectors.toList())); + } + AggOption aggOption = getAggOption(queryStatement, metricSchemas); + metricTable.setAggOption(aggOption); + List tables = new ArrayList<>(); + tables.add(metricTable); + + // 6.build ParseSqlReq + DataSetQueryParam datasetQueryParam = new DataSetQueryParam(); + datasetQueryParam.setTables(tables); + datasetQueryParam.setSql(queryStatement.getSql()); + DatabaseResp database = semanticSchemaResp.getDatabaseResp(); + if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()), + database.getVersion())) { + datasetQueryParam.setSupportWith(false); + datasetQueryParam.setWithAlias(false); + } + + // 7. do deriveMetric + generateDerivedMetric(semanticSchemaResp, aggOption, datasetQueryParam); + + // 8.physicalSql by ParseSqlReq + // queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(queryStatement.getSql())); + queryStructReq.setDataSetId(queryStatement.getDataSetId()); + queryStructReq.setQueryType(getQueryType(aggOption)); + log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq); + QueryParam queryParam = new QueryParam(); + BeanUtils.copyProperties(queryStructReq, queryParam); + queryStatement.setQueryParam(queryParam); + queryStatement.setDataSetQueryParam(datasetQueryParam); + // queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq)); + } + + private AggOption getAggOption(QueryStatement queryStatement, + List metricSchemas) { + String sql = queryStatement.getSql(); + if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql) + && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) { + log.debug("getAggOption simple sql set to DEFAULT"); + return AggOption.DEFAULT; + } + // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE" + // if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE" + if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) + || SqlSelectFunctionHelper.hasFunction(sql, "count") + || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) { + return AggOption.OUTER; + } + // if (queryStatement.isInnerLayerNative()) { + // return AggOption.NATIVE; + // } + if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql) + || SqlSelectHelper.hasGroupBy(sql)) { + return AggOption.OUTER; + } + long defaultAggNullCnt = metricSchemas.stream().filter( + m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg())) + .count(); + if (defaultAggNullCnt > 0) { + log.debug("getAggOption find null defaultAgg metric set to NATIVE"); + return AggOption.OUTER; + } + return AggOption.DEFAULT; + } + + private void convertNameToBizName(QueryStatement queryStatement) { + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + Map fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp); + String sql = queryStatement.getSql(); + log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(), + sql); + sql = SqlReplaceHelper.replaceSqlByPositions(sql); + log.debug("replaceSqlByPositions:{}", sql); + sql = 
SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true); + log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(), + sql); + sql = SqlReplaceHelper.replaceTable(sql, + Constants.TABLE_PREFIX + queryStatement.getDataSetId()); + log.debug("replaceTableName after:{}", sql); + queryStatement.setSql(sql); + } + + private Set getDimensions(SemanticSchemaResp semanticSchemaResp, + List allFields) { + Map dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream() + .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(), + SchemaItem::getBizName, (k1, k2) -> k1)); + dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(), + TimeDimensionEnum.DAY.getName()); + return allFields.stream() + .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase())) + .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase())) + .collect(Collectors.toSet()); + } + + private List getMetrics(SemanticSchemaResp semanticSchemaResp, + List allFields) { + Map metricLowerToNameMap = + semanticSchemaResp.getMetrics().stream().collect(Collectors + .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry)); + return allFields.stream() + .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase())) + .map(entry -> metricLowerToNameMap.get(entry.toLowerCase())) + .collect(Collectors.toList()); + } + + private void rewriteFunction(QueryStatement queryStatement) { + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + DatabaseResp database = semanticSchemaResp.getDatabaseResp(); + if (Objects.isNull(database) || Objects.isNull(database.getType())) { + return; + } + String type = database.getType(); + DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase()); + if (Objects.nonNull(engineAdaptor)) { + String functionNameCorrector = + engineAdaptor.functionNameCorrector(queryStatement.getSql()); + queryStatement.setSql(functionNameCorrector); + } + } + + protected Map getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) { + // support fieldName and field alias to bizName + Map dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap( + entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); + + Map metricResults = semanticSchemaResp.getMetrics().stream().flatMap( + entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); + + dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap()); + dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap()); + dimensionResults.putAll(metricResults); + return dimensionResults; + } + + private Stream> getPairStream(String aliasStr, String name, + String bizName) { + Set> elements = new HashSet<>(); + elements.add(Pair.of(name, bizName)); + if (StringUtils.isNotBlank(aliasStr)) { + List aliasList = SchemaItem.getAliasList(aliasStr); + for (String alias : aliasList) { + elements.add(Pair.of(alias, bizName)); + } + } + return elements.stream(); + } + + private QueryType getQueryType(AggOption aggOption) { + boolean isAgg = AggOption.isAgg(aggOption); + QueryType queryType = QueryType.DETAIL; + if (isAgg) { + queryType = QueryType.AGGREGATE; + } + return queryType; + } + + private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption, + DataSetQueryParam viewQueryParam) { + String sql = 
viewQueryParam.getSql(); + for (MetricTable metricTable : viewQueryParam.getTables()) { + Set measures = new HashSet<>(); + Map replaces = generateDerivedMetric(semanticSchemaResp, aggOption, + metricTable.getMetrics(), metricTable.getDimensions(), measures); + + if (!CollectionUtils.isEmpty(replaces)) { + // metricTable sql use measures replace metric + sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces); + metricTable.setAggOption(AggOption.NATIVE); + // metricTable use measures replace metric + if (!CollectionUtils.isEmpty(measures)) { + metricTable.setMetrics(new ArrayList<>(measures)); + } else { + // empty measure , fill default + metricTable.setMetrics(new ArrayList<>()); + metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( + getDefaultModel(semanticSchemaResp, metricTable.getDimensions()))); + } + } + } + viewQueryParam.setSql(sql); + } + + private Map generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, + AggOption aggOption, List metrics, List dimensions, + Set measures) { + Map result = new HashMap<>(); + List metricResps = semanticSchemaResp.getMetrics(); + List dimensionResps = semanticSchemaResp.getDimensions(); + + // Check if any metric is derived + boolean hasDerivedMetrics = + metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType + .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams())); + if (!hasDerivedMetrics) { + return result; + } + + log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics); + + Set allFields = new HashSet<>(); + Map allMeasures = new HashMap<>(); + semanticSchemaResp.getModelResps().forEach(modelResp -> { + allFields.addAll(modelResp.getFieldList()); + if (modelResp.getModelDetail().getMeasures() != null) { + modelResp.getModelDetail().getMeasures() + .forEach(measure -> allMeasures.put(measure.getBizName(), measure)); + } + }); + + Set derivedDimensions = new HashSet<>(); + Set derivedMetrics = new HashSet<>(); + Map visitedMetrics = new HashMap<>(); + + for (MetricResp metricResp : metricResps) { + if (metrics.contains(metricResp.getBizName())) { + boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(), + metricResp.getMetricDefineByMeasureParams()); + if (isDerived) { + String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields, + allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp), + metricResp.getMetricDefineType(), aggOption, visitedMetrics, + derivedMetrics, derivedDimensions); + result.put(metricResp.getBizName(), expr); + log.debug("derived metric {}->{}", metricResp.getBizName(), expr); + } else { + measures.add(metricResp.getBizName()); + } + } + } + + measures.addAll(derivedMetrics); + derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension)) + .forEach(dimensions::add); + + return result; + } + + private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List dimensions) { + if (!CollectionUtils.isEmpty(dimensions)) { + Map modelMatchCnt = new HashMap<>(); + for (ModelResp modelResp : semanticSchemaResp.getModelResps()) { + modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions() + .stream().filter(d -> dimensions.contains(d.getBizName())).count()); + } + return modelMatchCnt.entrySet().stream() + .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) + .map(m -> m.getKey()).findFirst().orElse(""); + } + return semanticSchemaResp.getModelResps().get(0).getBizName(); + } } diff --git 
a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java index 5b9bc18c5..ce0d27a11 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java @@ -1,22 +1,15 @@ package com.tencent.supersonic.headless.core.translator.calcite; -import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; -import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.QueryParser; -import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; -import com.tencent.supersonic.headless.core.translator.calcite.schema.RuntimeOptions; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.RuntimeOptions; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder; import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.sql.parser.SqlParseException; import org.springframework.stereotype.Component; -import java.util.Collections; -import java.util.Objects; - /** the calcite parse implements */ @Component("CalciteQueryParser") @Slf4j @@ -24,54 +17,19 @@ public class CalciteQueryParser implements QueryParser { @Override public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception { - MetricQueryParam metricReq = queryStatement.getMetricQueryParam(); Ontology ontology = queryStatement.getOntology(); if (ontology == null) { - queryStatement.setErrMsg("semanticSchema not found"); + queryStatement.setErrMsg("No ontology could be found"); return; } - queryStatement.setMetricQueryParam(metricReq); - S2SemanticSchema semanticSchema = getSemanticSchema(ontology, queryStatement); - AggPlanner aggPlanner = new AggPlanner(semanticSchema); - aggPlanner.plan(queryStatement, isAgg); - EngineType engineType = EngineType.fromString(ontology.getDatabase().getType()); - queryStatement.setSql(aggPlanner.getSql(engineType)); - if (Objects.nonNull(queryStatement.getEnableOptimize()) - && queryStatement.getEnableOptimize() - && Objects.nonNull(queryStatement.getDataSetAlias()) - && !queryStatement.getDataSetAlias().isEmpty()) { - // simplify model sql with query sql - String simplifySql = aggPlanner.simplify( - getSqlByDataSet(engineType, aggPlanner.getSql(engineType), - queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()), - engineType); - if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) { - log.debug("simplifySql [{}]", simplifySql); - queryStatement.setDataSetSimplifySql(simplifySql); - } - } + + S2CalciteSchema semanticSchema = S2CalciteSchema.builder() + .schemaKey("DATASET_" + queryStatement.getDataSetId()).ontology(ontology) + .runtimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime()) + .enableOptimize(queryStatement.getEnableOptimize()).build()) + .build(); + SqlBuilder 
sqlBuilder = new SqlBuilder(semanticSchema); + sqlBuilder.build(queryStatement, isAgg); } - private S2SemanticSchema getSemanticSchema(Ontology ontology, QueryStatement queryStatement) { - S2SemanticSchema semanticSchema = - S2SemanticSchema.newBuilder(ontology.getSchemaKey()).build(); - semanticSchema.setSemanticModel(ontology); - semanticSchema.setDatasource(ontology.getDatasourceMap()); - semanticSchema.setDimension(ontology.getDimensionMap()); - semanticSchema.setMetric(ontology.getMetrics()); - semanticSchema.setJoinRelations(ontology.getJoinRelations()); - semanticSchema.setRuntimeOptions( - RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime()) - .enableOptimize(queryStatement.getEnableOptimize()).build()); - return semanticSchema; - } - - private String getSqlByDataSet(EngineType engineType, String parentSql, String dataSetSql, - String parentAlias) throws SqlParseException { - if (!SqlMergeWithUtils.hasWith(engineType, dataSetSql)) { - return String.format("with %s as (%s) %s", parentAlias, parentSql, dataSetSql); - } - return SqlMergeWithUtils.mergeWith(engineType, dataSetSql, - Collections.singletonList(parentSql), Collections.singletonList(parentAlias)); - } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java deleted file mode 100644 index cccd9a71a..000000000 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/Planner.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.tencent.supersonic.headless.core.translator.calcite.planner; - -import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.api.pojo.enums.AggOption; -import com.tencent.supersonic.headless.core.pojo.QueryStatement; - -/** parse and generate SQL and other execute information */ -public interface Planner { - - void plan(QueryStatement queryStatement, AggOption aggOption) throws Exception; - - String getSql(EngineType enginType); - - String getSourceId(); - - String simplify(String sql, EngineType engineType); -} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataModel.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataModel.java index 4486f9572..1de81b929 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataModel.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/DataModel.java @@ -13,7 +13,7 @@ public class DataModel { private String name; - private Long sourceId; + private Long modelId; private String type; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Dimension.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Dimension.java index 98aa5b8f3..559444f32 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Dimension.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Dimension.java @@ -1,6 +1,5 @@ package com.tencent.supersonic.headless.core.translator.calcite.s2sql; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticItem; import lombok.Builder; import lombok.Data; diff --git 
a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Metric.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Metric.java index 366f2c60c..d58328ec3 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Metric.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Metric.java @@ -1,6 +1,5 @@ package com.tencent.supersonic.headless.core.translator.calcite.s2sql; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticItem; import lombok.Data; import java.util.List; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java index 3a0e8e489..fc5564ccf 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java @@ -13,7 +13,6 @@ import java.util.stream.Collectors; @Data public class Ontology { - private String schemaKey; private List metrics = new ArrayList<>(); private Map datasourceMap = new HashMap<>(); private Map> dimensionMap = new HashMap<>(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SemanticItem.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticItem.java similarity index 66% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SemanticItem.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticItem.java index 8d03edf65..be239b1d7 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SemanticItem.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/SemanticItem.java @@ -1,6 +1,7 @@ -package com.tencent.supersonic.headless.core.translator.calcite.schema; +package com.tencent.supersonic.headless.core.translator.calcite.s2sql; public interface SemanticItem { + String getName(); - public String getName(); + String getType(); } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java deleted file mode 100644 index 8eff0c8dd..000000000 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/S2SemanticSchema.java +++ /dev/null @@ -1,137 +0,0 @@ -package com.tencent.supersonic.headless.core.translator.calcite.schema; - -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; -import org.apache.calcite.schema.Schema; -import org.apache.calcite.schema.SchemaVersion; -import org.apache.calcite.schema.Table; 
-import org.apache.calcite.schema.impl.AbstractSchema; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class S2SemanticSchema extends AbstractSchema { - - private final String schemaKey; - - private final Map tableMap; - - private Ontology ontology = new Ontology(); - - private List joinRelations; - - private RuntimeOptions runtimeOptions; - - private S2SemanticSchema(String schemaKey, Map tableMap) { - this.schemaKey = schemaKey; - this.tableMap = tableMap; - } - - public static Builder newBuilder(String schemaKey) { - return new Builder(schemaKey); - } - - public String getSchemaKey() { - return schemaKey; - } - - public void setSemanticModel(Ontology ontology) { - this.ontology = ontology; - } - - public Ontology getSemanticModel() { - return ontology; - } - - @Override - public Map getTableMap() { - return tableMap; - } - - @Override - public Schema snapshot(SchemaVersion version) { - return this; - } - - public Map getDatasource() { - return ontology.getDatasourceMap(); - } - - public void setDatasource(Map datasource) { - ontology.setDatasourceMap(datasource); - } - - public Map> getDimension() { - return ontology.getDimensionMap(); - } - - public void setDimension(Map> dimensions) { - ontology.setDimensionMap(dimensions); - } - - public List getMetrics() { - return ontology.getMetrics(); - } - - public void setMetric(List metric) { - ontology.setMetrics(metric); - } - - public void setMaterializationList(List materializationList) { - ontology.setMaterializationList(materializationList); - } - - public List getMaterializationList() { - return ontology.getMaterializationList(); - } - - public void setJoinRelations(List joinRelations) { - this.joinRelations = joinRelations; - } - - public List getJoinRelations() { - return joinRelations; - } - - public void setRuntimeOptions(RuntimeOptions runtimeOptions) { - this.runtimeOptions = runtimeOptions; - } - - public RuntimeOptions getRuntimeOptions() { - return runtimeOptions; - } - - public static final class Builder { - - private final String schemaKey; - private final Map tableMap = new HashMap<>(); - - private Builder(String schemaKey) { - if (schemaKey == null) { - throw new IllegalArgumentException("Schema name cannot be null or empty"); - } - - this.schemaKey = schemaKey; - } - - public Builder addTable(DataSourceTable table) { - if (tableMap.containsKey(table.getTableName())) { - throw new IllegalArgumentException( - "Table already defined: " + table.getTableName()); - } - - tableMap.put(table.getTableName(), table); - - return this; - } - - public S2SemanticSchema build() { - return new S2SemanticSchema(schemaKey, tableMap); - } - } -} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/optimizer/FilterToGroupScanRule.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/FilterToGroupScanRule.java similarity index 87% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/optimizer/FilterToGroupScanRule.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/FilterToGroupScanRule.java index bc5194970..59e163791 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/optimizer/FilterToGroupScanRule.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/FilterToGroupScanRule.java @@ -1,6 +1,5 @@ -package 
com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer; +package com.tencent.supersonic.headless.core.translator.calcite.sql; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelRule; import org.apache.calcite.rel.core.Aggregate; @@ -40,24 +39,23 @@ public class FilterToGroupScanRule extends RelRule implements Transforma }); }).as(FilterTableScanRule.Config.class); - private S2SemanticSchema semanticSchema; + private S2CalciteSchema schema; - public FilterToGroupScanRule(FilterTableScanRule.Config config, - S2SemanticSchema semanticSchema) { + public FilterToGroupScanRule(FilterTableScanRule.Config config, S2CalciteSchema schema) { super(config); - this.semanticSchema = semanticSchema; + this.schema = schema; } public void onMatch(RelOptRuleCall call) { if (call.rels.length != 4) { return; } - if (Objects.isNull(semanticSchema.getRuntimeOptions()) - || Objects.isNull(semanticSchema.getRuntimeOptions().getMinMaxTime()) - || semanticSchema.getRuntimeOptions().getMinMaxTime().getLeft().isEmpty()) { + if (Objects.isNull(schema.getRuntimeOptions()) + || Objects.isNull(schema.getRuntimeOptions().getMinMaxTime()) + || schema.getRuntimeOptions().getMinMaxTime().getLeft().isEmpty()) { return; } - Triple minMax = semanticSchema.getRuntimeOptions().getMinMaxTime(); + Triple minMax = schema.getRuntimeOptions().getMinMaxTime(); Filter filter = (Filter) call.rel(0); Project project0 = (Project) call.rel(1); Project project1 = (Project) call.rel(3); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java deleted file mode 100644 index 3e73b3897..000000000 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Optimization.java +++ /dev/null @@ -1,8 +0,0 @@ -package com.tencent.supersonic.headless.core.translator.calcite.sql; - -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; - -public interface Optimization { - - public void visit(S2SemanticSchema semanticSchema); -} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/RuntimeOptions.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/RuntimeOptions.java similarity index 97% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/RuntimeOptions.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/RuntimeOptions.java index 1913f17a3..9bf681b3e 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/RuntimeOptions.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/RuntimeOptions.java @@ -1,4 +1,4 @@ -package com.tencent.supersonic.headless.core.translator.calcite.schema; +package com.tencent.supersonic.headless.core.translator.calcite.sql; import lombok.Builder; import lombok.Data; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java new file mode 100644 index 000000000..a9de834a3 --- /dev/null +++ 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java @@ -0,0 +1,48 @@ +package com.tencent.supersonic.headless.core.translator.calcite.sql; + +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; +import lombok.Builder; +import lombok.Data; +import org.apache.calcite.schema.Schema; +import org.apache.calcite.schema.SchemaVersion; +import org.apache.calcite.schema.impl.AbstractSchema; + +import java.util.List; +import java.util.Map; + +@Data +@Builder +public class S2CalciteSchema extends AbstractSchema { + + private String schemaKey; + + private Ontology ontology; + + private RuntimeOptions runtimeOptions; + + @Override + public Schema snapshot(SchemaVersion version) { + return this; + } + + public Map getDatasource() { + return ontology.getDatasourceMap(); + } + + public Map> getDimension() { + return ontology.getDimensionMap(); + } + + public List getJoinRelations() { + return ontology.getJoinRelations(); + } + + public List getMetrics() { + return ontology.getMetrics(); + } + +} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/DataSourceTable.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteTable.java similarity index 92% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/DataSourceTable.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteTable.java index e5e11f6dc..98a2b08fc 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/DataSourceTable.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteTable.java @@ -1,4 +1,4 @@ -package com.tencent.supersonic.headless.core.translator.calcite.schema; +package com.tencent.supersonic.headless.core.translator.calcite.sql; import org.apache.calcite.DataContext; import org.apache.calcite.linq4j.Enumerable; @@ -23,7 +23,7 @@ import java.util.ArrayList; import java.util.List; /** customize the AbstractTable */ -public class DataSourceTable extends AbstractTable implements ScannableTable, TranslatableTable { +public class S2CalciteTable extends AbstractTable implements ScannableTable, TranslatableTable { private final String tableName; private final List fieldNames; @@ -32,7 +32,7 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr private RelDataType rowType; - private DataSourceTable(String tableName, List fieldNames, List fieldTypes, + private S2CalciteTable(String tableName, List fieldNames, List fieldTypes, Statistic statistic) { this.tableName = tableName; this.fieldNames = fieldNames; @@ -116,7 +116,7 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr return this; } - public DataSourceTable build() { + public S2CalciteTable build() { if (fieldNames.isEmpty()) { throw new IllegalStateException("Table must have at least one field"); } @@ -125,7 +125,7 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr throw new 
IllegalStateException("Table must have positive row count"); } - return new DataSourceTable(tableName, fieldNames, fieldTypes, + return new S2CalciteTable(tableName, fieldNames, fieldTypes, Statistics.of(rowCount, null)); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SchemaBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SchemaBuilder.java similarity index 85% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SchemaBuilder.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SchemaBuilder.java index ec12bc402..43949e000 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/schema/SchemaBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SchemaBuilder.java @@ -1,8 +1,7 @@ -package com.tencent.supersonic.headless.core.translator.calcite.schema; +package com.tencent.supersonic.headless.core.translator.calcite.sql; import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.translator.calcite.sql.S2SQLSqlValidatorImpl; import org.apache.calcite.jdbc.CalciteSchema; import org.apache.calcite.prepare.CalciteCatalogReader; import org.apache.calcite.prepare.Prepare; @@ -27,15 +26,14 @@ public class SchemaBuilder { public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1"; public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2"; - public static SqlValidatorScope getScope(S2SemanticSchema schema) throws Exception { + public static SqlValidatorScope getScope(S2CalciteSchema schema) throws Exception { Map nameToTypeMap = new HashMap<>(); CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false); rootSchema.add(schema.getSchemaKey(), schema); Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema, Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory, Configuration.config); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); S2SQLSqlValidatorImpl s2SQLSqlValidator = new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader, Configuration.typeFactory, Configuration.getValidatorConfig(engineType)); @@ -45,12 +43,12 @@ public class SchemaBuilder { public static CalciteSchema getMaterializationSchema() { CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false); SchemaPlus schema = rootSchema.plus().add(MATERIALIZATION_SYS_DB, new AbstractSchema()); - DataSourceTable srcTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_SOURCE) + S2CalciteTable srcTable = S2CalciteTable.newBuilder(MATERIALIZATION_SYS_SOURCE) .addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE) .addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT).withRowCount(1) .build(); schema.add(MATERIALIZATION_SYS_SOURCE, srcTable); - DataSourceTable dataSetTable = DataSourceTable.newBuilder(MATERIALIZATION_SYS_VIEW) + S2CalciteTable dataSetTable = S2CalciteTable.newBuilder(MATERIALIZATION_SYS_VIEW) .addField(MATERIALIZATION_SYS_FIELD_DATE, SqlTypeName.DATE) .addField(MATERIALIZATION_SYS_FIELD_DATA, SqlTypeName.BIGINT).withRowCount(1) .build(); @@ -62,7 +60,7 @@ public class 
SchemaBuilder { Set dates, Set dimensions, Set metrics) { String tb = tbSrc; String db = dbSrc; - DataSourceTable.Builder builder = DataSourceTable.newBuilder(tb); + S2CalciteTable.Builder builder = S2CalciteTable.newBuilder(tb); for (String date : dates) { builder.addField(date, SqlTypeName.VARCHAR); } @@ -72,7 +70,7 @@ public class SchemaBuilder { for (String metric : metrics) { builder.addField(metric, SqlTypeName.ANY); } - DataSourceTable srcTable = builder.withRowCount(1).build(); + S2CalciteTable srcTable = builder.withRowCount(1).build(); if (Objects.nonNull(db) && !db.isEmpty()) { SchemaPlus dbPs = dataSetSchema.plus(); for (String d : db.split("\\.")) { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java similarity index 59% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java index 12aa596da..f29f4ced5 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/planner/AggPlanner.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java @@ -1,6 +1,7 @@ -package com.tencent.supersonic.headless.core.translator.calcite.planner; +package com.tencent.supersonic.headless.core.translator.calcite.sql; import com.tencent.supersonic.common.calcite.Configuration; +import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.core.pojo.Database; @@ -8,20 +9,20 @@ import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; -import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; -import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode; +import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.OutputRender; +import com.tencent.supersonic.headless.core.translator.calcite.sql.render.Renderer; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.validate.SqlValidatorScope; import java.util.ArrayList; +import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; @@ -29,29 +30,62 @@ import java.util.Objects; /** parsing from query dimensions and metrics */ @Slf4j -public class 
AggPlanner implements Planner { +public class SqlBuilder { - private MetricQueryParam metricReq; - private final S2SemanticSchema schema; + private MetricQueryParam metricQueryParam; + private final S2CalciteSchema schema; private SqlValidatorScope scope; private SqlNode parserNode; - private String sourceId; private boolean isAgg = false; private AggOption aggOption = AggOption.DEFAULT; - public AggPlanner(S2SemanticSchema schema) { + public SqlBuilder(S2CalciteSchema schema) { this.schema = schema; } - private void parse() throws Exception { + public void build(QueryStatement queryStatement, AggOption aggOption) throws Exception { + this.metricQueryParam = queryStatement.getMetricQueryParam(); + if (metricQueryParam.getMetrics() == null) { + metricQueryParam.setMetrics(new ArrayList<>()); + } + if (metricQueryParam.getDimensions() == null) { + metricQueryParam.setDimensions(new ArrayList<>()); + } + if (metricQueryParam.getLimit() == null) { + metricQueryParam.setLimit(0L); + } + this.aggOption = aggOption; + + buildParseNode(); + Database database = queryStatement.getOntology().getDatabase(); + EngineType engineType = EngineType.fromString(database.getType()); + optimizeParseNode(engineType); + String sql = getSql(engineType); + + queryStatement.setSql(sql); + if (Objects.nonNull(queryStatement.getEnableOptimize()) + && queryStatement.getEnableOptimize() + && Objects.nonNull(queryStatement.getDataSetAlias()) + && !queryStatement.getDataSetAlias().isEmpty()) { + // simplify model sql with query sql + String simplifySql = rewrite(getSqlByDataSet(engineType, sql, + queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()), engineType); + if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) { + log.debug("simplifySql [{}]", simplifySql); + queryStatement.setDataSetSimplifySql(simplifySql); + } + } + } + + private void buildParseNode() throws Exception { // find the match Datasource scope = SchemaBuilder.getScope(schema); - List datasource = getMatchDataSource(scope); - if (datasource == null || datasource.isEmpty()) { - throw new Exception("datasource not found"); + List dataModels = + DataModelNode.getRelatedDataModels(scope, schema, metricQueryParam); + if (dataModels == null || dataModels.isEmpty()) { + throw new Exception("data model not found"); } - isAgg = getAgg(datasource.get(0)); - sourceId = String.valueOf(datasource.get(0).getSourceId()); + isAgg = getAgg(dataModels.get(0)); // build level by level LinkedList builders = new LinkedList<>(); @@ -64,21 +98,17 @@ public class AggPlanner implements Planner { while (it.hasNext()) { Renderer renderer = it.next(); if (previous != null) { - previous.render(metricReq, datasource, scope, schema, !isAgg); + previous.render(metricQueryParam, dataModels, scope, schema, !isAgg); renderer.setTable(previous - .builderAs(DataSourceNode.getNames(datasource) + "_" + String.valueOf(i))); + .builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i))); i++; } previous = renderer; } - builders.getLast().render(metricReq, datasource, scope, schema, !isAgg); + builders.getLast().render(metricQueryParam, dataModels, scope, schema, !isAgg); parserNode = builders.getLast().builder(); } - private List getMatchDataSource(SqlValidatorScope scope) throws Exception { - return DataSourceNode.getMatchDataSources(scope, schema, metricReq); - } - private boolean getAgg(DataModel dataModel) { if (!AggOption.DEFAULT.equals(aggOption)) { return AggOption.isAgg(aggOption); @@ -86,46 +116,18 @@ public class AggPlanner implements Planner { // 
default by dataModel time aggregation if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime() .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) { - if (!metricReq.isNativeQuery()) { + if (!metricQueryParam.isNativeQuery()) { return true; } } return isAgg; } - @Override - public void plan(QueryStatement queryStatement, AggOption aggOption) throws Exception { - this.metricReq = queryStatement.getMetricQueryParam(); - if (metricReq.getMetrics() == null) { - metricReq.setMetrics(new ArrayList<>()); - } - if (metricReq.getDimensions() == null) { - metricReq.setDimensions(new ArrayList<>()); - } - if (metricReq.getLimit() == null) { - metricReq.setLimit(0L); - } - this.aggOption = aggOption; - // build a parse Node - parse(); - // optimizer - Database database = queryStatement.getOntology().getDatabase(); - EngineType engineType = EngineType.fromString(database.getType()); - optimize(engineType); - } - - @Override public String getSql(EngineType engineType) { return SemanticNode.getSql(parserNode, engineType); } - @Override - public String getSourceId() { - return sourceId; - } - - @Override - public String simplify(String sql, EngineType engineType) { + private String rewrite(String sql, EngineType engineType) { try { SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt(); @@ -139,7 +141,7 @@ public class AggPlanner implements Planner { return ""; } - private void optimize(EngineType engineType) { + private void optimizeParseNode(EngineType engineType) { if (Objects.isNull(schema.getRuntimeOptions()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize()) || !schema.getRuntimeOptions().getEnableOptimize()) { @@ -162,4 +164,13 @@ public class AggPlanner implements Planner { } } + private String getSqlByDataSet(EngineType engineType, String parentSql, String dataSetSql, + String parentAlias) throws SqlParseException { + if (!SqlMergeWithUtils.hasWith(engineType, dataSetSql)) { + return String.format("with %s as (%s) %s", parentAlias, parentSql, dataSetSql); + } + return SqlMergeWithUtils.mergeWith(engineType, dataSetSql, + Collections.singletonList(parentSql), Collections.singletonList(parentAlias)); + } + } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataSourceNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java similarity index 92% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataSourceNode.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java index c71acbb6a..105952132 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataSourceNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java @@ -11,9 +11,8 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; -import com.tencent.supersonic.headless.core.translator.calcite.schema.SchemaBuilder; -import 
com.tencent.supersonic.headless.core.translator.calcite.sql.node.extend.LateralViewExplodeNode; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlDataTypeSpec; @@ -38,27 +37,27 @@ import java.util.Set; import java.util.stream.Collectors; @Slf4j -public class DataSourceNode extends SemanticNode { +public class DataModelNode extends SemanticNode { - public static SqlNode build(DataModel datasource, SqlValidatorScope scope) throws Exception { + public static SqlNode build(DataModel dataModel, SqlValidatorScope scope) throws Exception { String sqlTable = ""; - if (datasource.getSqlQuery() != null && !datasource.getSqlQuery().isEmpty()) { - sqlTable = datasource.getSqlQuery(); - } else if (datasource.getTableQuery() != null && !datasource.getTableQuery().isEmpty()) { - if (datasource.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) { - String fullTableName = Arrays.stream(datasource.getTableQuery().split("\\.")) + if (dataModel.getSqlQuery() != null && !dataModel.getSqlQuery().isEmpty()) { + sqlTable = dataModel.getSqlQuery(); + } else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) { + if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) { + String fullTableName = Arrays.stream(dataModel.getTableQuery().split("\\.")) .collect(Collectors.joining(".public.")); sqlTable = "select * from " + fullTableName; } else { - sqlTable = "select * from " + datasource.getTableQuery(); + sqlTable = "select * from " + dataModel.getTableQuery(); } } if (sqlTable.isEmpty()) { throw new Exception("DatasourceNode build error [tableSqlNode not found]"); } - SqlNode source = getTable(sqlTable, scope, EngineType.fromString(datasource.getType())); - addSchema(scope, datasource, sqlTable); - return buildAs(datasource.getName(), source); + SqlNode source = getTable(sqlTable, scope, EngineType.fromString(dataModel.getType())); + addSchema(scope, dataModel, sqlTable); + return buildAs(dataModel.getName(), source); } private static void addSchema(SqlValidatorScope scope, DataModel datasource, String table) @@ -150,7 +149,7 @@ public class DataSourceNode extends SemanticNode { return dataModelList.stream().map(d -> d.getName()).collect(Collectors.joining("_")); } - public static void getQueryDimensionMeasure(S2SemanticSchema schema, + public static void getQueryDimensionMeasure(S2CalciteSchema schema, MetricQueryParam metricCommand, Set queryDimension, List measures) { queryDimension.addAll(metricCommand.getDimensions().stream() .map(d -> d.contains(Constants.DIMENSION_IDENTIFY) @@ -166,11 +165,10 @@ public class DataSourceNode extends SemanticNode { .forEach(m -> measures.add(m)); } - public static void mergeQueryFilterDimensionMeasure(S2SemanticSchema schema, + public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema, MetricQueryParam metricCommand, Set queryDimension, List measures, SqlValidatorScope scope) throws Exception { - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { Set filterConditions = new HashSet<>(); FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, 
engineType), @@ -193,8 +191,8 @@ public class DataSourceNode extends SemanticNode { } } - public static List getMatchDataSources(SqlValidatorScope scope, - S2SemanticSchema schema, MetricQueryParam metricCommand) throws Exception { + public static List getRelatedDataModels(SqlValidatorScope scope, + S2CalciteSchema schema, MetricQueryParam metricCommand) throws Exception { List dataModels = new ArrayList<>(); // check by metric @@ -232,7 +230,7 @@ public class DataSourceNode extends SemanticNode { filterMeasure.addAll(sourceMeasure); filterMeasure.addAll(dimension); EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType.fromString(schema.getOntology().getDatabase().getType()); mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, scope); boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, @@ -302,7 +300,7 @@ public class DataSourceNode extends SemanticNode { } private static List getLinkDataSourcesByJoinRelation(Set queryDimension, - List measures, DataModel baseDataModel, S2SemanticSchema schema) { + List measures, DataModel baseDataModel, S2CalciteSchema schema) { Set linkDataSourceName = new HashSet<>(); List linkDataModels = new ArrayList<>(); Set before = new HashSet<>(); @@ -387,7 +385,7 @@ public class DataSourceNode extends SemanticNode { private static List getLinkDataSources(Set baseIdentifiers, Set queryDimension, List measures, DataModel baseDataModel, - S2SemanticSchema schema) { + S2CalciteSchema schema) { Set linkDataSourceName = new HashSet<>(); List linkDataModels = new ArrayList<>(); for (Map.Entry entry : schema.getDatasource().entrySet()) { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/JoinNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/JoinNode.java deleted file mode 100644 index 2819942a8..000000000 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/JoinNode.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.tencent.supersonic.headless.core.translator.calcite.sql.node; - -import lombok.Data; -import org.apache.calcite.sql.SqlNode; - -@Data -public class JoinNode extends SemanticNode { - - private SqlNode join; - private SqlNode on; - private SqlNode left; - private SqlNode right; -} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/extend/LateralViewExplodeNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/LateralViewExplodeNode.java similarity index 96% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/extend/LateralViewExplodeNode.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/LateralViewExplodeNode.java index d51847e7a..920bd9a1c 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/extend/LateralViewExplodeNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/LateralViewExplodeNode.java @@ -1,6 +1,5 @@ -package com.tencent.supersonic.headless.core.translator.calcite.sql.node.extend; +package com.tencent.supersonic.headless.core.translator.calcite.sql.node; -import com.tencent.supersonic.headless.core.translator.calcite.sql.node.ExtendNode; import 
org.apache.calcite.linq4j.Ord; import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlIdentifier; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java index d50566a3e..fe4ac64d5 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/MetricNode.java @@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.node; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import lombok.Data; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.validate.SqlValidatorScope; @@ -30,7 +30,7 @@ public class MetricNode extends SemanticNode { return buildAs(metric.getName(), sqlNode); } - public static Boolean isMetricField(String name, S2SemanticSchema schema) { + public static Boolean isMetricField(String name, S2CalciteSchema schema) { Optional metric = schema.getMetrics().stream() .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst(); return metric.isPresent() && metric.get().getMetricTypeParams().isFieldMetric(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java index 1ad5f569e..4f3849e2b 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/SemanticNode.java @@ -5,8 +5,8 @@ import com.tencent.supersonic.common.calcite.SemanticSqlDialect; import com.tencent.supersonic.common.calcite.SqlDialectFactory; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; -import com.tencent.supersonic.headless.core.translator.calcite.sql.optimizer.FilterToGroupScanRule; +import com.tencent.supersonic.headless.core.translator.calcite.sql.FilterToGroupScanRule; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.hep.HepPlanner; @@ -397,8 +397,8 @@ public abstract class SemanticNode { return parseInfo; } - public static SqlNode optimize(SqlValidatorScope scope, S2SemanticSchema schema, - SqlNode sqlNode, EngineType engineType) { + public static SqlNode optimize(SqlValidatorScope scope, S2CalciteSchema schema, SqlNode sqlNode, + EngineType engineType) { try { HepProgramBuilder hepProgramBuilder = new HepProgramBuilder(); SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java index ea00612de..fd058739a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java @@ -5,8 +5,7 @@ import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; -import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; @@ -28,13 +27,12 @@ public class FilterRender extends Renderer { @Override public void render(MetricQueryParam metricCommand, List dataModels, - SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { + SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { TableView tableView = super.tableView; SqlNode filterNode = null; List queryMetrics = new ArrayList<>(metricCommand.getMetrics()); List queryDimensions = new ArrayList<>(metricCommand.getDimensions()); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) { filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java index eff262f5a..e1d745e1a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java @@ -9,11 +9,10 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; -import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode; -import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode; +import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode; import 
com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; @@ -49,10 +48,9 @@ public class JoinRender extends Renderer { @Override public void render(MetricQueryParam metricCommand, List dataModels, - SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { + SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { String queryWhere = metricCommand.getWhere(); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); Set whereFields = new HashSet<>(); List fieldWhere = new ArrayList<>(); if (queryWhere != null && !queryWhere.isEmpty()) { @@ -62,7 +60,7 @@ public class JoinRender extends Renderer { } Set queryAllDimension = new HashSet<>(); List measures = new ArrayList<>(); - DataSourceNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures); + DataModelNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures); SqlNode left = null; TableView leftTable = null; TableView innerView = new TableView(); @@ -145,11 +143,10 @@ public class JoinRender extends Renderer { private void doMetric(Map innerSelect, TableView filterView, List queryMetrics, List reqMetrics, DataModel dataModel, - Set sourceMeasure, SqlValidatorScope scope, S2SemanticSchema schema, + Set sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); for (String m : reqMetrics) { if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) { MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias); @@ -181,11 +178,10 @@ public class JoinRender extends Renderer { private void doDimension(Map innerSelect, Set filterDimension, List queryDimension, List reqDimensions, DataModel dataModel, - Set dimension, SqlValidatorScope scope, S2SemanticSchema schema) + Set dimension, SqlValidatorScope scope, S2CalciteSchema schema) throws Exception { String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); for (String d : reqDimensions) { if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) { @@ -208,7 +204,7 @@ public class JoinRender extends Renderer { .collect(Collectors.toSet()); } - private boolean getMatchMetric(S2SemanticSchema schema, Set sourceMeasure, String m, + private boolean getMatchMetric(S2CalciteSchema schema, Set sourceMeasure, String m, List queryMetrics) { Optional metric = schema.getMetrics().stream() .filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst(); @@ -229,7 +225,7 @@ public class JoinRender extends Renderer { return isAdd; } - private boolean getMatchDimension(S2SemanticSchema schema, Set sourceDimension, + private boolean getMatchDimension(S2CalciteSchema schema, Set sourceDimension, DataModel 
dataModel, String d, List queryDimension) { String oriDimension = d; boolean isAdd = false; @@ -263,10 +259,9 @@ public class JoinRender extends Renderer { } private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, - Map before, DataModel dataModel, S2SemanticSchema schema, + Map before, DataModel dataModel, S2CalciteSchema schema, SqlValidatorScope scope) throws Exception { - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); SqlNode condition = getCondition(leftTable, tableView, dataModel, schema, scope, engineType); SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral(""); @@ -298,7 +293,7 @@ public class JoinRender extends Renderer { } private JoinRelation getMatchJoinRelation(Map before, TableView tableView, - S2SemanticSchema schema) { + S2CalciteSchema schema) { JoinRelation matchJoinRelation = JoinRelation.builder().build(); if (!CollectionUtils.isEmpty(schema.getJoinRelations())) { for (JoinRelation joinRelation : schema.getJoinRelations()) { @@ -338,7 +333,7 @@ public class JoinRender extends Renderer { } private SqlNode getCondition(TableView left, TableView right, DataModel dataModel, - S2SemanticSchema schema, SqlValidatorScope scope, EngineType engineType) + S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType) throws Exception { Set selectLeft = SemanticNode.getSelect(left.getTable()); @@ -413,7 +408,7 @@ public class JoinRender extends Renderer { } private SqlNode getZipperCondition(TableView left, TableView right, DataModel dataModel, - S2SemanticSchema schema, SqlValidatorScope scope) throws Exception { + S2CalciteSchema schema, SqlValidatorScope scope) throws Exception { if (Materialization.TimePartType.ZIPPER.equals(left.getDataModel().getTimePartType()) && Materialization.TimePartType.ZIPPER .equals(right.getDataModel().getTimePartType())) { @@ -460,7 +455,7 @@ public class JoinRender extends Renderer { dateTime = partMetric.getAlias() + "." 
+ partTime.get().getName(); } EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType.fromString(schema.getOntology().getDatabase().getType()); ArrayList operandList = new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType), SemanticNode.parse(dateTime, scope, engineType))); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java index 729022216..ef20426f9 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java @@ -4,8 +4,7 @@ import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; -import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; @@ -24,10 +23,9 @@ public class OutputRender extends Renderer { @Override public void render(MetricQueryParam metricCommand, List dataModels, - SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { + SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { TableView selectDataSet = super.tableView; - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); for (String dimension : metricCommand.getDimensions()) { selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType)); } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Renderer.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/Renderer.java similarity index 94% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Renderer.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/Renderer.java index f5eedce08..d4322a411 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/Renderer.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/Renderer.java @@ -1,4 +1,4 @@ -package com.tencent.supersonic.headless.core.translator.calcite.sql; +package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; @@ -7,7 +7,8 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import 
com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; @@ -37,7 +38,7 @@ public abstract class Renderer { .findFirst(); } - public static Optional getMetricByName(String name, S2SemanticSchema schema) { + public static Optional getMetricByName(String name, S2CalciteSchema schema) { Optional metric = schema.getMetrics().stream() .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst(); return metric; @@ -49,7 +50,7 @@ public abstract class Renderer { } public static MetricNode buildMetricNode(String metric, DataModel datasource, - SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg, String alias) + SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg, String alias) throws Exception { Optional metricOpt = getMetricByName(metric, schema); MetricNode metricNode = new MetricNode(); @@ -114,5 +115,5 @@ public abstract class Renderer { } public abstract void render(MetricQueryParam metricCommand, List dataModels, - SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception; + SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java index f4d2876af..428d5e6d4 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java @@ -9,10 +9,9 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; -import com.tencent.supersonic.headless.core.translator.calcite.sql.Renderer; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; -import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataSourceNode; +import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DimensionNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode; @@ -43,7 +42,7 @@ public class SourceRender extends Renderer { public static TableView renderOne(String alias, List fieldWheres, List reqMetrics, List reqDimensions, String queryWhere, - DataModel datasource, 
SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) + DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { TableView dataSet = new TableView(); @@ -96,7 +95,7 @@ public class SourceRender extends Renderer { output.setMeasure(SemanticNode.deduplicateNode(output.getMeasure())); dataSet.setMeasure(SemanticNode.deduplicateNode(dataSet.getMeasure())); - SqlNode tableNode = DataSourceNode.buildExtend(datasource, extendFields, scope); + SqlNode tableNode = DataModelNode.buildExtend(datasource, extendFields, scope); dataSet.setTable(tableNode); output.setTable( SemanticNode.buildAs(Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName() @@ -107,11 +106,10 @@ public class SourceRender extends Renderer { private static void buildDimension(String alias, String dimension, DataModel datasource, - S2SemanticSchema schema, boolean nonAgg, Map extendFields, + S2CalciteSchema schema, boolean nonAgg, Map extendFields, TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception { List dimensionList = schema.getDimension().get(datasource.getName()); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); boolean isAdd = false; if (!CollectionUtils.isEmpty(dimensionList)) { for (Dimension dim : dimensionList) { @@ -186,11 +184,10 @@ public class SourceRender extends Renderer { private static List getWhereMeasure(List fields, List queryMetrics, List queryDimensions, Map extendFields, DataModel datasource, - SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { + SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { Iterator iterator = fields.iterator(); List whereNode = new ArrayList<>(); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); while (iterator.hasNext()) { String cur = iterator.next(); if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) { @@ -229,7 +226,7 @@ public class SourceRender extends Renderer { private static void mergeWhere(List fields, TableView dataSet, TableView outputSet, List queryMetrics, List queryDimensions, Map extendFields, DataModel datasource, SqlValidatorScope scope, - S2SemanticSchema schema, boolean nonAgg) throws Exception { + S2CalciteSchema schema, boolean nonAgg) throws Exception { List whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, extendFields, datasource, scope, schema, nonAgg); dataSet.getMeasure().addAll(whereNode); @@ -237,7 +234,7 @@ public class SourceRender extends Renderer { } public static void whereDimMetric(List fields, List queryMetrics, - List queryDimensions, DataModel datasource, S2SemanticSchema schema, + List queryDimensions, DataModel datasource, S2CalciteSchema schema, Set dimensions, Set metrics) { for (String field : fields) { if (queryDimensions.contains(field) || queryMetrics.contains(field)) { @@ -252,7 +249,7 @@ public class SourceRender extends Renderer { } private static void addField(String field, String oriField, DataModel datasource, - S2SemanticSchema schema, Set dimensions, Set metrics) { + S2CalciteSchema schema, Set dimensions, Set metrics) { Optional dimension = datasource.getDimensions().stream() .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst(); if 
(dimension.isPresent()) { @@ -292,7 +289,7 @@ public class SourceRender extends Renderer { } } - public static boolean isDimension(String name, DataModel datasource, S2SemanticSchema schema) { + public static boolean isDimension(String name, DataModel datasource, S2CalciteSchema schema) { Optional dimension = datasource.getDimensions().stream() .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst(); if (dimension.isPresent()) { @@ -340,12 +337,11 @@ public class SourceRender extends Renderer { } public void render(MetricQueryParam metricQueryParam, List dataModels, - SqlValidatorScope scope, S2SemanticSchema schema, boolean nonAgg) throws Exception { + SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { String queryWhere = metricQueryParam.getWhere(); Set whereFields = new HashSet<>(); List fieldWhere = new ArrayList<>(); - EngineType engineType = - EngineType.fromString(schema.getSemanticModel().getDatabase().getType()); + EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); if (queryWhere != null && !queryWhere.isEmpty()) { SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType); FilterNode.getFilterField(sqlNode, whereFields); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java index f6bcf80ce..c26b66c44 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java @@ -34,7 +34,6 @@ import com.tencent.supersonic.headless.core.cache.QueryCache; import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.SemanticTranslator; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.server.annotation.S2DataPermission; import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService; @@ -44,7 +43,6 @@ import com.tencent.supersonic.headless.server.service.DimensionService; import com.tencent.supersonic.headless.server.service.MetricService; import com.tencent.supersonic.headless.server.service.SchemaService; import com.tencent.supersonic.headless.server.utils.MetricDrillDownChecker; -import com.tencent.supersonic.headless.server.utils.QueryReqConverter; import com.tencent.supersonic.headless.server.utils.QueryUtils; import com.tencent.supersonic.headless.server.utils.StatUtils; import lombok.SneakyThrows; @@ -68,7 +66,6 @@ public class S2SemanticLayerService implements SemanticLayerService { private final StatUtils statUtils; private final QueryUtils queryUtils; - private final QueryReqConverter queryReqConverter; private final SemanticSchemaManager semanticSchemaManager; private final DataSetService dataSetService; private final SchemaService schemaService; @@ -81,14 +78,13 @@ public class S2SemanticLayerService implements SemanticLayerService { private final List queryExecutors = ComponentFactory.getQueryExecutors(); public S2SemanticLayerService(StatUtils statUtils, QueryUtils queryUtils, - QueryReqConverter queryReqConverter, SemanticSchemaManager 
semanticSchemaManager, - DataSetService dataSetService, SchemaService schemaService, - SemanticTranslator semanticTranslator, MetricDrillDownChecker metricDrillDownChecker, + SemanticSchemaManager semanticSchemaManager, DataSetService dataSetService, + SchemaService schemaService, SemanticTranslator semanticTranslator, + MetricDrillDownChecker metricDrillDownChecker, KnowledgeBaseService knowledgeBaseService, MetricService metricService, DimensionService dimensionService) { this.statUtils = statUtils; this.queryUtils = queryUtils; - this.queryReqConverter = queryReqConverter; this.semanticSchemaManager = semanticSchemaManager; this.dataSetService = dataSetService; this.schemaService = schemaService; @@ -123,7 +119,6 @@ public class S2SemanticLayerService implements SemanticLayerService { statUtils.initStatInfo(queryReq, user); // 2.query from cache - String cacheKey = queryCache.getCacheKey(queryReq); Object query = queryCache.query(queryReq, cacheKey); if (Objects.nonNull(query)) { @@ -137,16 +132,16 @@ public class S2SemanticLayerService implements SemanticLayerService { } StatUtils.get().setUseResultCache(false); - // 3 query + // 3 translate query QueryStatement queryStatement = buildQueryStatement(queryReq, user); + semanticTranslator.translate(queryStatement); + + // Check whether the dimensions of the metric drill-down are correct temporarily, + // add the abstraction of a validator later. + metricDrillDownChecker.checkQuery(queryStatement); + + // 4.execute query SemanticQueryResp queryResp = null; - - // skip translation if already done. - if (!queryStatement.isTranslated()) { - semanticTranslator.translate(queryStatement); - } - queryPreCheck(queryStatement); - for (QueryExecutor queryExecutor : queryExecutors) { if (queryExecutor.accept(queryStatement)) { queryResp = queryExecutor.execute(queryStatement); @@ -155,7 +150,7 @@ public class S2SemanticLayerService implements SemanticLayerService { } } - // 4 reset cache and set stateInfo + // 5.reset cache and set stateInfo Boolean setCacheSuccess = queryCache.put(cacheKey, queryResp); if (setCacheSuccess) { // if result is not null, update cache data @@ -186,7 +181,7 @@ public class S2SemanticLayerService implements SemanticLayerService { List dimensionValues = getDimensionValuesFromDict(dimensionValueReq, dataSetIds); - // If the search results are null, search dimensionValue from the database + // try to query dimensionValue from the database. 
if (CollectionUtils.isEmpty(dimensionValues)) { return getDimensionValuesFromDb(dimensionValueReq, user); } @@ -219,9 +214,29 @@ public class S2SemanticLayerService implements SemanticLayerService { .map(MapResult::getName).collect(Collectors.toList()); } - private SemanticQueryResp getDimensionValuesFromDb(DimensionValueReq dimensionValueReq, + private SemanticQueryResp getDimensionValuesFromDb(DimensionValueReq queryDimValueReq, User user) { - QuerySqlReq querySqlReq = buildQuerySqlReq(dimensionValueReq); + QuerySqlReq querySqlReq = new QuerySqlReq(); + List modelResps = + schemaService.getModelList(Lists.newArrayList(queryDimValueReq.getModelId())); + DimensionResp dimensionResp = schemaService.getDimension(queryDimValueReq.getBizName(), + queryDimValueReq.getModelId()); + ModelResp modelResp = modelResps.get(0); + String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(), + modelResp.getName()); + List timeDims = modelResp.getTimeDimension(); + if (CollectionUtils.isNotEmpty(timeDims)) { + sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql, + TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(), + TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getEndDate()); + } + if (StringUtils.isNotBlank(queryDimValueReq.getValue())) { + sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%" + + queryDimValueReq.getValue() + "%'"; + } + querySqlReq.setModelIds(Sets.newHashSet(queryDimValueReq.getModelId())); + querySqlReq.setSql(sql); + return queryByReq(querySqlReq, user); } @@ -272,18 +287,16 @@ public class S2SemanticLayerService implements SemanticLayerService { return metricService.getMetrics(metaFilter); } - private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user) - throws Exception { + private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user) { QueryStatement queryStatement = null; if (semanticQueryReq instanceof QuerySqlReq) { queryStatement = buildSqlQueryStatement((QuerySqlReq) semanticQueryReq, user); } if (semanticQueryReq instanceof QueryStructReq) { - queryStatement = buildStructQueryStatement((QueryStructReq) semanticQueryReq); + queryStatement = buildStructQueryStatement(semanticQueryReq); } if (semanticQueryReq instanceof QueryMultiStructReq) { - queryStatement = - buildMultiStructQueryStatement((QueryMultiStructReq) semanticQueryReq, user); + queryStatement = buildMultiStructQueryStatement((QueryMultiStructReq) semanticQueryReq); } if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo()) && StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) { @@ -300,77 +313,40 @@ public class S2SemanticLayerService implements SemanticLayerService { Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user); querySqlReq.setDataSetId(dataSetId); } - SchemaFilterReq filter = buildSchemaFilterReq(querySqlReq); - SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter); - return queryReqConverter.buildQueryStatement(querySqlReq, semanticSchemaResp); + + QueryStatement queryStatement = buildStructQueryStatement(querySqlReq); + queryStatement.setIsS2SQL(true); + queryStatement.setSql(querySqlReq.getSql()); + return queryStatement; } - private QueryStatement buildStructQueryStatement(QueryStructReq queryStructReq) { - SchemaFilterReq filter = buildSchemaFilterReq(queryStructReq); - SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter); 
+ private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) { + SchemaFilterReq schemaFilterReq = new SchemaFilterReq(); + schemaFilterReq.setDataSetId(queryReq.getDataSetId()); + schemaFilterReq.setModelIds(queryReq.getModelIds()); + SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(schemaFilterReq); + QueryStatement queryStatement = new QueryStatement(); QueryParam queryParam = new QueryParam(); - BeanUtils.copyProperties(queryStructReq, queryParam); + BeanUtils.copyProperties(queryReq, queryParam); queryStatement.setQueryParam(queryParam); - queryStatement.setIsS2SQL(false); + queryStatement.setModelIds(queryReq.getModelIds()); queryStatement.setEnableOptimize(queryUtils.enableOptimize()); - queryStatement.setDataSetId(queryStructReq.getDataSetId()); + queryStatement.setDataSetId(queryReq.getDataSetId()); queryStatement.setSemanticSchemaResp(semanticSchemaResp); queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp)); return queryStatement; } - private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq, - User user) throws Exception { - List sqlParsers = new ArrayList<>(); + private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq) { + List queryStatements = new ArrayList<>(); for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) { - QueryStatement queryStatement = buildQueryStatement(queryStructReq, user); - Ontology ontology = queryStatement.getOntology(); - queryStatement.setModelIds(queryStructReq.getModelIds()); - queryStatement.setOntology(ontology); - queryStatement.setEnableOptimize(queryUtils.enableOptimize()); + QueryStatement queryStatement = buildStructQueryStatement(queryStructReq); semanticTranslator.translate(queryStatement); - sqlParsers.add(queryStatement); + queryStatements.add(queryStatement); } - log.info("multi sqlParser:{}", sqlParsers); - return queryUtils.sqlParserUnion(queryMultiStructReq, sqlParsers); - } - - private SchemaFilterReq buildSchemaFilterReq(SemanticQueryReq semanticQueryReq) { - SchemaFilterReq schemaFilterReq = new SchemaFilterReq(); - schemaFilterReq.setDataSetId(semanticQueryReq.getDataSetId()); - schemaFilterReq.setModelIds(semanticQueryReq.getModelIds()); - return schemaFilterReq; - } - - private QuerySqlReq buildQuerySqlReq(DimensionValueReq queryDimValueReq) { - QuerySqlReq querySqlReq = new QuerySqlReq(); - List modelResps = - schemaService.getModelList(Lists.newArrayList(queryDimValueReq.getModelId())); - DimensionResp dimensionResp = schemaService.getDimension(queryDimValueReq.getBizName(), - queryDimValueReq.getModelId()); - ModelResp modelResp = modelResps.get(0); - String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(), - modelResp.getName()); - List timeDims = modelResp.getTimeDimension(); - if (CollectionUtils.isNotEmpty(timeDims)) { - sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql, - TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(), - TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getEndDate()); - } - if (StringUtils.isNotBlank(queryDimValueReq.getValue())) { - sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%" - + queryDimValueReq.getValue() + "%'"; - } - querySqlReq.setModelIds(Sets.newHashSet(queryDimValueReq.getModelId())); - querySqlReq.setSql(sql); - return querySqlReq; - } - - private void queryPreCheck(QueryStatement queryStatement) { - // Check whether the 
dimensions of the metric drill-down are correct temporarily, - // add the abstraction of a validator later. - metricDrillDownChecker.checkQuery(queryStatement); + log.info("Union multiple query statements:{}", queryStatements); + return queryUtils.unionAll(queryMultiStructReq, queryStatements); } } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java index b4a8cc25a..d0551f01a 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java @@ -19,7 +19,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl; @@ -59,7 +59,6 @@ public class SemanticSchemaManager { public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) { Ontology ontology = new Ontology(); - ontology.setSchemaKey(semanticSchemaResp.getSchemaKey()); Map> dimensionYamlTpls = new HashMap<>(); List dataModelYamlTpls = new ArrayList<>(); List metricYamlTpls = new ArrayList<>(); @@ -177,7 +176,7 @@ public class SemanticSchemaManager { } public static DataModel getDatasource(final DataModelYamlTpl d) { - DataModel datasource = DataModel.builder().id(d.getId()).sourceId(d.getSourceId()) + DataModel datasource = DataModel.builder().id(d.getId()).modelId(d.getSourceId()) .type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName()) .tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers())) .measures(getMeasureParams(d.getMeasures())) @@ -354,13 +353,13 @@ public class SemanticSchemaManager { return joinRelations; } - public static void update(S2SemanticSchema schema, List metric) throws Exception { + public static void update(S2CalciteSchema schema, List metric) throws Exception { if (schema != null) { updateMetric(metric, schema.getMetrics()); } } - public static void update(S2SemanticSchema schema, DataModel datasourceYamlTpl) + public static void update(S2CalciteSchema schema, DataModel datasourceYamlTpl) throws Exception { if (schema != null) { String dataSourceName = datasourceYamlTpl.getName(); @@ -375,7 +374,7 @@ public class SemanticSchemaManager { } } - public static void update(S2SemanticSchema schema, String datasourceBizName, + public static void update(S2CalciteSchema schema, String datasourceBizName, List dimensionYamlTpls) throws Exception { if (schema != null) { Optional>> datasourceYamlTplMap = schema diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java index 74470a188..12caf5b77 100644 --- 
a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java @@ -32,7 +32,7 @@ public class MetricDrillDownChecker { public void checkQuery(QueryStatement queryStatement) { SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); - String sql = queryStatement.getDataSetQueryParam().getSql(); + String sql = queryStatement.getSql(); if (StringUtils.isBlank(sql)) { return; } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java deleted file mode 100644 index 677743985..000000000 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryReqConverter.java +++ /dev/null @@ -1,378 +0,0 @@ -package com.tencent.supersonic.headless.server.utils; - -import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper; -import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper; -import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper; -import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; -import com.tencent.supersonic.common.pojo.Aggregator; -import com.tencent.supersonic.common.pojo.Constants; -import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum; -import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.common.pojo.enums.QueryType; -import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; -import com.tencent.supersonic.headless.api.pojo.Measure; -import com.tencent.supersonic.headless.api.pojo.MetricTable; -import com.tencent.supersonic.headless.api.pojo.QueryParam; -import com.tencent.supersonic.headless.api.pojo.SchemaItem; -import com.tencent.supersonic.headless.api.pojo.enums.AggOption; -import com.tencent.supersonic.headless.api.pojo.enums.MetricType; -import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq; -import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; -import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; -import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp; -import com.tencent.supersonic.headless.api.pojo.response.MetricResp; -import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; -import com.tencent.supersonic.headless.api.pojo.response.ModelResp; -import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; -import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor; -import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory; -import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam; -import com.tencent.supersonic.headless.core.pojo.QueryStatement; -import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; -import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.tuple.Pair; -import org.springframework.beans.BeanUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; -import org.springframework.util.CollectionUtils; - -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import 
java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -@Component -@Slf4j -public class QueryReqConverter { - - @Autowired - private QueryStructUtils queryStructUtils; - - @Autowired - private SqlGenerateUtils sqlGenerateUtils; - - @Autowired - private QueryUtils queryUtils; - - @Autowired - private SemanticSchemaManager semanticSchemaManager; - - public QueryStatement buildQueryStatement(QuerySqlReq querySQLReq, - SemanticSchemaResp semanticSchemaResp) { - - if (semanticSchemaResp == null) { - return new QueryStatement(); - } - // 1.convert name to bizName - convertNameToBizName(querySQLReq, semanticSchemaResp); - // 2.functionName corrector - functionNameCorrector(querySQLReq, semanticSchemaResp); - // 3.correct tableName - correctTableName(querySQLReq); - // 4.remove Underscores - querySQLReq.setSql(SqlRemoveHelper.removeUnderscores(querySQLReq.getSql())); - - String tableName = SqlSelectHelper.getTableName(querySQLReq.getSql()); - if (StringUtils.isEmpty(tableName)) { - return new QueryStatement(); - } - // correct order item is same as agg alias - String reqSql = querySQLReq.getSql(); - querySQLReq.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(querySQLReq.getSql())); - log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, querySQLReq.getSql()); - // 5.build MetricTables - List allFields = SqlSelectHelper.getAllSelectFields(querySQLReq.getSql()); - List metricSchemas = getMetrics(semanticSchemaResp, allFields); - List metrics = - metricSchemas.stream().map(m -> m.getBizName()).collect(Collectors.toList()); - QueryStructReq queryStructReq = new QueryStructReq(); - - MetricTable metricTable = new MetricTable(); - metricTable.getMetrics().addAll(metrics); - Set dimensions = getDimensions(semanticSchemaResp, allFields); - metricTable.getDimensions().addAll(dimensions); - metricTable.setAlias(tableName.toLowerCase()); - // if metric empty , fill model default - if (CollectionUtils.isEmpty(metricTable.getMetrics())) { - metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( - getDefaultModel(semanticSchemaResp, metricTable.getDimensions()))); - } else { - queryStructReq.setAggregators(metricTable.getMetrics().stream() - .map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN)) - .collect(Collectors.toList())); - } - AggOption aggOption = getAggOption(querySQLReq, metricSchemas); - metricTable.setAggOption(aggOption); - List tables = new ArrayList<>(); - tables.add(metricTable); - // 6.build ParseSqlReq - DataSetQueryParam result = new DataSetQueryParam(); - BeanUtils.copyProperties(querySQLReq, result); - - result.setTables(tables); - DatabaseResp database = semanticSchemaResp.getDatabaseResp(); - if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()), - database.getVersion())) { - result.setSupportWith(false); - result.setWithAlias(false); - } - // 7. 
do deriveMetric - generateDerivedMetric(semanticSchemaResp, aggOption, result); - - // 8.physicalSql by ParseSqlReq - queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(querySQLReq.getSql())); - queryStructReq.setDataSetId(querySQLReq.getDataSetId()); - queryStructReq.setQueryType(getQueryType(aggOption)); - log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq); - QueryParam queryParam = new QueryParam(); - BeanUtils.copyProperties(queryStructReq, queryParam); - - QueryStatement queryStatement = new QueryStatement(); - queryStatement.setQueryParam(queryParam); - queryStatement.setDataSetQueryParam(result); - queryStatement.setIsS2SQL(true); - queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq)); - queryStatement.setDataSetId(querySQLReq.getDataSetId()); - queryStatement.setLimit(querySQLReq.getLimit()); - queryStatement.setModelIds(querySQLReq.getModelIds()); - queryStatement.setEnableOptimize(queryUtils.enableOptimize()); - queryStatement.setSemanticSchemaResp(semanticSchemaResp); - queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp)); - - return queryStatement; - } - - private AggOption getAggOption(QuerySqlReq databaseReq, List metricSchemas) { - String sql = databaseReq.getSql(); - if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql) - && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) { - log.debug("getAggOption simple sql set to DEFAULT"); - return AggOption.DEFAULT; - } - // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE" - // if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE" - if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) - || SqlSelectFunctionHelper.hasFunction(sql, "count") - || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) { - return AggOption.OUTER; - } - if (databaseReq.isInnerLayerNative()) { - return AggOption.NATIVE; - } - if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql) - || SqlSelectHelper.hasGroupBy(sql)) { - return AggOption.OUTER; - } - long defaultAggNullCnt = metricSchemas.stream().filter( - m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg())) - .count(); - if (defaultAggNullCnt > 0) { - log.debug("getAggOption find null defaultAgg metric set to NATIVE"); - return AggOption.OUTER; - } - return AggOption.DEFAULT; - } - - private void convertNameToBizName(QuerySqlReq querySqlReq, - SemanticSchemaResp semanticSchemaResp) { - Map fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp); - String sql = querySqlReq.getSql(); - log.debug("dataSetId:{},convert name to bizName before:{}", querySqlReq.getDataSetId(), - sql); - sql = SqlReplaceHelper.replaceSqlByPositions(sql); - log.debug("replaceSqlByPositions:{}", sql); - String replaceFields = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true); - log.debug("dataSetId:{},convert name to bizName after:{}", querySqlReq.getDataSetId(), - replaceFields); - querySqlReq.setSql(replaceFields); - } - - private Set getDimensions(SemanticSchemaResp semanticSchemaResp, - List allFields) { - Map dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream() - .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(), - SchemaItem::getBizName, (k1, k2) -> k1)); - Map internalLowerToNameMap = QueryStructUtils.internalCols.stream() - .collect(Collectors.toMap(String::toLowerCase, a -> a)); - dimensionLowerToNameMap.putAll(internalLowerToNameMap); 
- return allFields.stream() - .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase())) - .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase())) - .collect(Collectors.toSet()); - } - - private List getMetrics(SemanticSchemaResp semanticSchemaResp, - List allFields) { - Map metricLowerToNameMap = - semanticSchemaResp.getMetrics().stream().collect(Collectors - .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry)); - return allFields.stream() - .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase())) - .map(entry -> metricLowerToNameMap.get(entry.toLowerCase())) - .collect(Collectors.toList()); - } - - private void functionNameCorrector(QuerySqlReq databaseReq, - SemanticSchemaResp semanticSchemaResp) { - DatabaseResp database = semanticSchemaResp.getDatabaseResp(); - if (Objects.isNull(database) || Objects.isNull(database.getType())) { - return; - } - String type = database.getType(); - DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase()); - if (Objects.nonNull(engineAdaptor)) { - String functionNameCorrector = - engineAdaptor.functionNameCorrector(databaseReq.getSql()); - databaseReq.setSql(functionNameCorrector); - } - } - - protected Map getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) { - // support fieldName and field alias to bizName - Map dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap( - entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) - .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); - - Map metricResults = semanticSchemaResp.getMetrics().stream().flatMap( - entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) - .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); - - dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap()); - dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap()); - dimensionResults.putAll(metricResults); - return dimensionResults; - } - - private Stream> getPairStream(String aliasStr, String name, - String bizName) { - Set> elements = new HashSet<>(); - elements.add(Pair.of(name, bizName)); - if (StringUtils.isNotBlank(aliasStr)) { - List aliasList = SchemaItem.getAliasList(aliasStr); - for (String alias : aliasList) { - elements.add(Pair.of(alias, bizName)); - } - } - return elements.stream(); - } - - public void correctTableName(QuerySqlReq querySqlReq) { - String sql = querySqlReq.getSql(); - sql = SqlReplaceHelper.replaceTable(sql, - Constants.TABLE_PREFIX + querySqlReq.getDataSetId()); - log.debug("correctTableName after:{}", sql); - querySqlReq.setSql(sql); - } - - private QueryType getQueryType(AggOption aggOption) { - boolean isAgg = AggOption.isAgg(aggOption); - QueryType queryType = QueryType.DETAIL; - if (isAgg) { - queryType = QueryType.AGGREGATE; - } - return queryType; - } - - private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption, - DataSetQueryParam viewQueryParam) { - String sql = viewQueryParam.getSql(); - for (MetricTable metricTable : viewQueryParam.getTables()) { - Set measures = new HashSet<>(); - Map replaces = generateDerivedMetric(semanticSchemaResp, aggOption, - metricTable.getMetrics(), metricTable.getDimensions(), measures); - - if (!CollectionUtils.isEmpty(replaces)) { - // metricTable sql use measures replace metric - sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces); - metricTable.setAggOption(AggOption.NATIVE); - // metricTable use measures replace 
metric - if (!CollectionUtils.isEmpty(measures)) { - metricTable.setMetrics(new ArrayList<>(measures)); - } else { - // empty measure , fill default - metricTable.setMetrics(new ArrayList<>()); - metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( - getDefaultModel(semanticSchemaResp, metricTable.getDimensions()))); - } - } - } - viewQueryParam.setSql(sql); - } - - private Map generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, - AggOption aggOption, List metrics, List dimensions, - Set measures) { - Map result = new HashMap<>(); - List metricResps = semanticSchemaResp.getMetrics(); - List dimensionResps = semanticSchemaResp.getDimensions(); - - // Check if any metric is derived - boolean hasDerivedMetrics = - metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType - .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams())); - if (!hasDerivedMetrics) { - return result; - } - - log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics); - - Set allFields = new HashSet<>(); - Map allMeasures = new HashMap<>(); - semanticSchemaResp.getModelResps().forEach(modelResp -> { - allFields.addAll(modelResp.getFieldList()); - if (modelResp.getModelDetail().getMeasures() != null) { - modelResp.getModelDetail().getMeasures() - .forEach(measure -> allMeasures.put(measure.getBizName(), measure)); - } - }); - - Set derivedDimensions = new HashSet<>(); - Set derivedMetrics = new HashSet<>(); - Map visitedMetrics = new HashMap<>(); - - for (MetricResp metricResp : metricResps) { - if (metrics.contains(metricResp.getBizName())) { - boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(), - metricResp.getMetricDefineByMeasureParams()); - if (isDerived) { - String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields, - allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp), - metricResp.getMetricDefineType(), aggOption, visitedMetrics, - derivedMetrics, derivedDimensions); - result.put(metricResp.getBizName(), expr); - log.debug("derived metric {}->{}", metricResp.getBizName(), expr); - } else { - measures.add(metricResp.getBizName()); - } - } - } - - measures.addAll(derivedMetrics); - derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension)) - .forEach(dimensions::add); - - return result; - } - - private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List dimensions) { - if (!CollectionUtils.isEmpty(dimensions)) { - Map modelMatchCnt = new HashMap<>(); - for (ModelResp modelResp : semanticSchemaResp.getModelResps()) { - modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions() - .stream().filter(d -> dimensions.contains(d.getBizName())).count()); - } - return modelMatchCnt.entrySet().stream() - .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) - .map(m -> m.getKey()).findFirst().orElse(""); - } - return semanticSchemaResp.getModelResps().get(0).getBizName(); - } -} diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryUtils.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryUtils.java index 597b59c9a..150bb3cda 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryUtils.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/QueryUtils.java @@ -140,15 +140,15 @@ public class QueryUtils { return null; } - public QueryStatement sqlParserUnion(QueryMultiStructReq 
queryMultiStructCmd, - List sqlParsers) { + public QueryStatement unionAll(QueryMultiStructReq queryMultiStructCmd, + List queryStatements) { QueryStatement sqlParser = new QueryStatement(); StringBuilder unionSqlBuilder = new StringBuilder(); - for (int i = 0; i < sqlParsers.size(); i++) { + for (int i = 0; i < queryStatements.size(); i++) { String selectStr = SqlGenerateUtils .getUnionSelect(queryMultiStructCmd.getQueryStructReqs().get(i)); unionSqlBuilder.append(String.format("select %s from ( %s ) sub_sql_%s", selectStr, - sqlParsers.get(i).getSql(), i)); + queryStatements.get(i).getSql(), i)); unionSqlBuilder.append(UNIONALL); } String unionSql = unionSqlBuilder.substring(0, diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java index fa7896f3a..d519ba8dd 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java @@ -6,8 +6,8 @@ import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp; import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; -import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner; -import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; +import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder; import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; @@ -20,16 +20,12 @@ import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.List; -import java.util.Map; @Slf4j class HeadlessParserServiceTest { - private static Map headlessSchemaMap = new HashMap<>(); - - public static SqlParserResp parser(S2SemanticSchema semanticSchema, + public static SqlParserResp parser(S2CalciteSchema semanticSchema, MetricQueryParam metricQueryParam, boolean isAgg) { SqlParserResp sqlParser = new SqlParserResp(); try { @@ -37,14 +33,13 @@ class HeadlessParserServiceTest { sqlParser.setErrMsg("headlessSchema not found"); return sqlParser; } - AggPlanner aggBuilder = new AggPlanner(semanticSchema); + SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); QueryStatement queryStatement = new QueryStatement(); queryStatement.setMetricQueryParam(metricQueryParam); - aggBuilder.plan(queryStatement, AggOption.getAggregation(!isAgg)); - EngineType engineType = EngineType - .fromString(semanticSchema.getSemanticModel().getDatabase().getType()); + aggBuilder.build(queryStatement, AggOption.getAggregation(!isAgg)); + EngineType engineType = + EngineType.fromString(semanticSchema.getOntology().getDatabase().getType()); sqlParser.setSql(aggBuilder.getSql(engineType)); - sqlParser.setSourceId(aggBuilder.getSourceId()); } catch (Exception e) { sqlParser.setErrMsg(e.getMessage()); log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e); @@ -122,7 +117,7 @@ class 
HeadlessParserServiceTest { identify.setType("primary"); identifies.add(identify); datasource.setIdentifiers(identifies); - S2SemanticSchema semanticSchema = S2SemanticSchema.newBuilder("1").build(); + S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build(); SemanticSchemaManager.update(semanticSchema, SemanticSchemaManager.getDatasource(datasource)); @@ -192,7 +187,7 @@ class HeadlessParserServiceTest { System.out.println(parser(semanticSchema, metricCommand2, true)); } - private static void addDepartment(S2SemanticSchema semanticSchema) { + private static void addDepartment(S2CalciteSchema semanticSchema) { DataModelYamlTpl datasource = new DataModelYamlTpl(); datasource.setName("user_department"); datasource.setSourceId(1L); diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java index 87b282372..7f7cb56b4 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java @@ -134,7 +134,7 @@ public class S2VisitsDemo extends S2BaseDemo { private void addSampleChats(Integer agentId) { Long chatId = chatManageService.addChat(defaultUser, "样例对话1", agentId); - submitText(chatId.intValue(), agentId, "超音数 访问次数"); + submitText(chatId.intValue(), agentId, "访问过超音数的部门有哪些"); submitText(chatId.intValue(), agentId, "按部门统计近7天访问次数"); submitText(chatId.intValue(), agentId, "alice 停留时长"); } @@ -446,7 +446,7 @@ public class S2VisitsDemo extends S2BaseDemo { termReq1.setDescription("用户为tom和lucy"); termReq1.setAlias(Lists.newArrayList("VIP用户")); termReq1.setDomainId(s2Domain.getId()); - termService.saveOrUpdate(termReq, defaultUser); + termService.saveOrUpdate(termReq1, defaultUser); } private void addAuthGroup_1(ModelResp stayTimeModel) { From be05f977d50255743e1e2a23a1fcaa0e7943fdc2 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Thu, 21 Nov 2024 09:09:24 +0800 Subject: [PATCH 08/88] [improvement][headless]Clean code logic of headless translator. 
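
The main rename: Ontology.datasourceMap becomes dataModelMap, and the S2CalciteSchema
accessors become getDataModels()/getDimensions() in place of the old
getDatasource()/getDimension(), alongside the existing getMetrics(), so the renderers and
SemanticSchemaManager read the schema through one consistent naming. A minimal sketch of
the call pattern after the rename (the helper class below is illustrative only and not
part of this patch):

    // Illustrative helper only, not shipped with this patch: shows the accessor
    // names after the rename (getDatasource() -> getDataModels(),
    // getDimension() -> getDimensions()); 'var' stands in for the generic types.
    import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;

    public class SchemaAccessSketch {

        public static void summarize(S2CalciteSchema schema) {
            var dataModels = schema.getDataModels();   // was schema.getDatasource()
            var dimensions = schema.getDimensions();   // was schema.getDimension()
            var metrics = schema.getMetrics();

            dataModels.keySet().forEach(name -> System.out.println("data model: " + name));
            System.out.println("dimension groups: " + dimensions.size());
            System.out.println("metrics defined: " + metrics.size());
        }
    }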
--- .../translator/calcite/s2sql/Ontology.java | 4 +-- .../calcite/sql/S2CalciteSchema.java | 14 ++++---- .../translator/calcite/sql/SqlBuilder.java | 2 +- .../calcite/sql/node/DataModelNode.java | 24 ++++++------- .../calcite/sql/render/JoinRender.java | 4 +-- .../calcite/sql/render/SourceRender.java | 12 +++---- .../converter/SqlVariableParseConverter.java | 2 +- .../server/manager/SemanticSchemaManager.java | 35 +++++++++---------- .../calcite/HeadlessParserServiceTest.java | 8 ++--- 9 files changed, 52 insertions(+), 53 deletions(-) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java index fc5564ccf..da2e21698 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java @@ -14,7 +14,7 @@ import java.util.stream.Collectors; public class Ontology { private List metrics = new ArrayList<>(); - private Map datasourceMap = new HashMap<>(); + private Map dataModelMap = new HashMap<>(); private Map> dimensionMap = new HashMap<>(); private List materializationList = new ArrayList<>(); private List joinRelations; @@ -26,7 +26,7 @@ public class Ontology { } public Map getModelMap() { - return datasourceMap.values().stream() + return dataModelMap.values().stream() .collect(Collectors.toMap(DataModel::getId, dataSource -> dataSource)); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java index a9de834a3..e1559809f 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/S2CalciteSchema.java @@ -29,11 +29,15 @@ public class S2CalciteSchema extends AbstractSchema { return this; } - public Map getDatasource() { - return ontology.getDatasourceMap(); + public Map getDataModels() { + return ontology.getDataModelMap(); } - public Map> getDimension() { + public List getMetrics() { + return ontology.getMetrics(); + } + + public Map> getDimensions() { return ontology.getDimensionMap(); } @@ -41,8 +45,4 @@ public class S2CalciteSchema extends AbstractSchema { return ontology.getJoinRelations(); } - public List getMetrics() { - return ontology.getMetrics(); - } - } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java index f29f4ced5..971886ed4 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java @@ -32,8 +32,8 @@ import java.util.Objects; @Slf4j public class SqlBuilder { - private MetricQueryParam metricQueryParam; private final S2CalciteSchema schema; + private MetricQueryParam metricQueryParam; private SqlValidatorScope scope; private SqlNode parserNode; private boolean isAgg = false; diff --git 
a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java index 105952132..73c5c422e 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java @@ -202,7 +202,7 @@ public class DataModelNode extends SemanticNode { DataModel baseDataModel = null; // one , match measure count Map dataSourceMeasures = new HashMap<>(); - for (Map.Entry entry : schema.getDatasource().entrySet()) { + for (Map.Entry entry : schema.getDataModels().entrySet()) { Set sourceMeasure = entry.getValue().getMeasures().stream() .map(mm -> mm.getName()).collect(Collectors.toSet()); sourceMeasure.retainAll(measures); @@ -212,7 +212,7 @@ public class DataModelNode extends SemanticNode { Optional> base = dataSourceMeasures.entrySet().stream() .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); if (base.isPresent()) { - baseDataModel = schema.getDatasource().get(base.get().getKey()); + baseDataModel = schema.getDataModels().get(base.get().getKey()); dataModels.add(baseDataModel); } // second , check match all dimension and metric @@ -223,8 +223,8 @@ public class DataModelNode extends SemanticNode { Set dimension = baseDataModel.getDimensions().stream().map(dd -> dd.getName()) .collect(Collectors.toSet()); baseDataModel.getIdentifiers().stream().forEach(i -> dimension.add(i.getName())); - if (schema.getDimension().containsKey(baseDataModel.getName())) { - schema.getDimension().get(baseDataModel.getName()).stream() + if (schema.getDimensions().containsKey(baseDataModel.getName())) { + schema.getDimensions().get(baseDataModel.getName()).stream() .forEach(d -> dimension.add(d.getName())); } filterMeasure.addAll(sourceMeasure); @@ -319,8 +319,8 @@ public class DataModelNode extends SemanticNode { } boolean isMatch = false; boolean isRight = before.contains(joinRelation.getLeft()); - DataModel other = isRight ? schema.getDatasource().get(joinRelation.getRight()) - : schema.getDatasource().get(joinRelation.getLeft()); + DataModel other = isRight ? 
schema.getDataModels().get(joinRelation.getRight()) + : schema.getDataModels().get(joinRelation.getLeft()); if (!queryDimension.isEmpty()) { Set linkDimension = other.getDimensions().stream() .map(dd -> dd.getName()).collect(Collectors.toSet()); @@ -336,8 +336,8 @@ public class DataModelNode extends SemanticNode { if (!linkMeasure.isEmpty()) { isMatch = true; } - if (!isMatch && schema.getDimension().containsKey(other.getName())) { - Set linkDimension = schema.getDimension().get(other.getName()).stream() + if (!isMatch && schema.getDimensions().containsKey(other.getName())) { + Set linkDimension = schema.getDimensions().get(other.getName()).stream() .map(dd -> dd.getName()).collect(Collectors.toSet()); linkDimension.retainAll(queryDimension); if (!linkDimension.isEmpty()) { @@ -362,7 +362,7 @@ public class DataModelNode extends SemanticNode { } } orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> { - linkDataModels.add(schema.getDatasource().get(d.getKey())); + linkDataModels.add(schema.getDataModels().get(d.getKey())); }); } return linkDataModels; @@ -388,7 +388,7 @@ public class DataModelNode extends SemanticNode { S2CalciteSchema schema) { Set linkDataSourceName = new HashSet<>(); List linkDataModels = new ArrayList<>(); - for (Map.Entry entry : schema.getDatasource().entrySet()) { + for (Map.Entry entry : schema.getDataModels().entrySet()) { if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) { continue; } @@ -419,7 +419,7 @@ public class DataModelNode extends SemanticNode { } } } - for (Map.Entry> entry : schema.getDimension().entrySet()) { + for (Map.Entry> entry : schema.getDimensions().entrySet()) { if (!queryDimension.isEmpty()) { Set linkDimension = entry.getValue().stream().map(dd -> dd.getName()) .collect(Collectors.toSet()); @@ -430,7 +430,7 @@ public class DataModelNode extends SemanticNode { } } for (String linkName : linkDataSourceName) { - linkDataModels.add(schema.getDatasource().get(linkName)); + linkDataModels.add(schema.getDataModels().get(linkName)); } if (!CollectionUtils.isEmpty(linkDataModels)) { List all = new ArrayList<>(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java index e1d745e1a..a74b85934 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java @@ -241,8 +241,8 @@ public class JoinRender extends Renderer { break; } } - if (schema.getDimension().containsKey(dataModel.getName())) { - for (Dimension dim : schema.getDimension().get(dataModel.getName())) { + if (schema.getDimensions().containsKey(dataModel.getName())) { + for (Dimension dim : schema.getDimensions().get(dataModel.getName())) { if (dim.getName().equalsIgnoreCase(oriDimension)) { isAdd = true; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java index 428d5e6d4..c4eb0d24c 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java +++ 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java @@ -108,7 +108,7 @@ public class SourceRender extends Renderer { private static void buildDimension(String alias, String dimension, DataModel datasource, S2CalciteSchema schema, boolean nonAgg, Map extendFields, TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception { - List dimensionList = schema.getDimension().get(datasource.getName()); + List dimensionList = schema.getDimensions().get(datasource.getName()); EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); boolean isAdd = false; if (!CollectionUtils.isEmpty(dimensionList)) { @@ -195,7 +195,7 @@ public class SourceRender extends Renderer { } } for (String where : fields) { - List dimensionList = schema.getDimension().get(datasource.getName()); + List dimensionList = schema.getDimensions().get(datasource.getName()); boolean isAdd = false; if (!CollectionUtils.isEmpty(dimensionList)) { for (Dimension dim : dimensionList) { @@ -262,8 +262,8 @@ public class SourceRender extends Renderer { dimensions.add(oriField); return; } - if (schema.getDimension().containsKey(datasource.getName())) { - Optional dataSourceDim = schema.getDimension().get(datasource.getName()) + if (schema.getDimensions().containsKey(datasource.getName())) { + Optional dataSourceDim = schema.getDimensions().get(datasource.getName()) .stream().filter(d -> d.getName().equalsIgnoreCase(field)).findFirst(); if (dataSourceDim.isPresent()) { dimensions.add(oriField); @@ -300,8 +300,8 @@ public class SourceRender extends Renderer { if (identify.isPresent()) { return true; } - if (schema.getDimension().containsKey(datasource.getName())) { - Optional dataSourceDim = schema.getDimension().get(datasource.getName()) + if (schema.getDimensions().containsKey(datasource.getName())) { + Optional dataSourceDim = schema.getDimensions().get(datasource.getName()) .stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst(); if (dataSourceDim.isPresent()) { return true; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java index a73f4ecf5..e2c5f9a06 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java @@ -40,7 +40,7 @@ public class SqlVariableParseConverter implements QueryConverter { modelResp.getModelDetail().getSqlVariables(), queryStatement.getQueryParam().getParams()); DataModel dataModel = - queryStatement.getOntology().getDatasourceMap().get(modelResp.getBizName()); + queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName()); dataModel.setSqlQuery(sqlParsed); } } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java index d0551f01a..7e5eff936 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java @@ -72,10 +72,10 @@ public class SemanticSchemaManager { 
getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName)); } if (!dataModelYamlTpls.isEmpty()) { - Map dataSourceMap = - dataModelYamlTpls.stream().map(SemanticSchemaManager::getDatasource).collect( + Map dataModelMap = + dataModelYamlTpls.stream().map(SemanticSchemaManager::getDataModel).collect( Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1)); - ontology.setDatasourceMap(dataSourceMap); + ontology.setDataModelMap(dataModelMap); } if (!dimensionYamlTpls.isEmpty()) { Map> dimensionMap = new HashMap<>(); @@ -103,9 +103,8 @@ public class SemanticSchemaManager { } tagMap.get(tagResp.getModelId()).add(tagResp); } - if (Objects.nonNull(ontology.getDatasourceMap()) - && !ontology.getDatasourceMap().isEmpty()) { - for (Map.Entry entry : ontology.getDatasourceMap().entrySet()) { + if (Objects.nonNull(ontology.getDataModelMap()) && !ontology.getDataModelMap().isEmpty()) { + for (Map.Entry entry : ontology.getDataModelMap().entrySet()) { List modelDimensions = new ArrayList<>(); if (!ontology.getDimensionMap().containsKey(entry.getKey())) { ontology.getDimensionMap().put(entry.getKey(), modelDimensions); @@ -175,30 +174,30 @@ public class SemanticSchemaManager { return getDimension(t); } - public static DataModel getDatasource(final DataModelYamlTpl d) { - DataModel datasource = DataModel.builder().id(d.getId()).modelId(d.getSourceId()) + public static DataModel getDataModel(final DataModelYamlTpl d) { + DataModel dataModel = DataModel.builder().id(d.getId()).modelId(d.getSourceId()) .type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName()) .tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers())) .measures(getMeasureParams(d.getMeasures())) .dimensions(getDimensions(d.getDimensions())).build(); - datasource.setAggTime(getDataSourceAggTime(datasource.getDimensions())); + dataModel.setAggTime(getDataModelAggTime(dataModel.getDimensions())); if (Objects.nonNull(d.getModelSourceTypeEnum())) { - datasource.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name())); + dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name())); } if (Objects.nonNull(d.getFields()) && !CollectionUtils.isEmpty(d.getFields())) { - Set measures = datasource.getMeasures().stream().map(mm -> mm.getName()) + Set measures = dataModel.getMeasures().stream().map(mm -> mm.getName()) .collect(Collectors.toSet()); for (Field f : d.getFields()) { if (!measures.contains(f.getFieldName())) { - datasource.getMeasures().add(Measure.builder().expr(f.getFieldName()) + dataModel.getMeasures().add(Measure.builder().expr(f.getFieldName()) .name(f.getFieldName()).agg("").build()); } } } - return datasource; + return dataModel; } - private static String getDataSourceAggTime(List dimensions) { + private static String getDataModelAggTime(List dimensions) { Optional timeDimension = dimensions.stream() .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .findFirst(); @@ -364,12 +363,12 @@ public class SemanticSchemaManager { if (schema != null) { String dataSourceName = datasourceYamlTpl.getName(); Optional> datasourceYamlTplMap = - schema.getDatasource().entrySet().stream() + schema.getDataModels().entrySet().stream() .filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst(); if (datasourceYamlTplMap.isPresent()) { datasourceYamlTplMap.get().setValue(datasourceYamlTpl); } else { - schema.getDatasource().put(dataSourceName, datasourceYamlTpl); + schema.getDataModels().put(dataSourceName, datasourceYamlTpl); } } } @@ -378,14 +377,14 @@ 
public class SemanticSchemaManager { List dimensionYamlTpls) throws Exception { if (schema != null) { Optional>> datasourceYamlTplMap = schema - .getDimension().entrySet().stream() + .getDimensions().entrySet().stream() .filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst(); if (datasourceYamlTplMap.isPresent()) { updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue()); } else { List dimensions = new ArrayList<>(); updateDimension(dimensionYamlTpls, dimensions); - schema.getDimension().put(datasourceBizName, dimensions); + schema.getDimensions().put(datasourceBizName, dimensions); } } } diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java index d519ba8dd..656ccd081 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java @@ -120,7 +120,7 @@ class HeadlessParserServiceTest { S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build(); SemanticSchemaManager.update(semanticSchema, - SemanticSchemaManager.getDatasource(datasource)); + SemanticSchemaManager.getDataModel(datasource)); DimensionYamlTpl dimension1 = new DimensionYamlTpl(); dimension1.setExpr("page"); @@ -233,8 +233,8 @@ class HeadlessParserServiceTest { identifies.add(identify); datasource.setIdentifiers(identifies); - semanticSchema.getDatasource().put("user_department", - SemanticSchemaManager.getDatasource(datasource)); + semanticSchema.getDataModels().put("user_department", + SemanticSchemaManager.getDataModel(datasource)); DimensionYamlTpl dimension1 = new DimensionYamlTpl(); dimension1.setExpr("department"); @@ -243,7 +243,7 @@ class HeadlessParserServiceTest { List dimensionYamlTpls = new ArrayList<>(); dimensionYamlTpls.add(dimension1); - semanticSchema.getDimension().put("user_department", + semanticSchema.getDimensions().put("user_department", SemanticSchemaManager.getDimensions(dimensionYamlTpls)); } } From 62fc2dd18a29d252ed452813064a11e085e3bede Mon Sep 17 00:00:00 2001 From: tristanliu Date: Thu, 21 Nov 2024 18:09:31 +0800 Subject: [PATCH 09/88] [improvement][headless-fe] Added permissions management for agents. 
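
The agent form reuses the shared SelectTMEPerson picker for the new admins field; the
added `selectPerson` class only restores the default antd tag colors for the selected
entries. A rough standalone sketch of how the picker is driven (the state wiring and the
element type of the selected values are assumptions, not part of this diff):

    // Sketch: controlled usage of the shared person picker for agent admins.
    // The shape of the selected values (user names vs. ids) is assumed here.
    import React, { useState } from 'react';
    import SelectTMEPerson from '@/components/SelectTMEPerson';

    const AgentAdminsPicker: React.FC = () => {
      const [admins, setAdmins] = useState<string[]>([]);
      return (
        <SelectTMEPerson
          placeholder="请邀请团队成员"
          value={admins}
          onChange={(value: string[]) => setAdmins(value)}
        />
      );
    };

    export default AgentAdminsPicker;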
--- .../src/components/SelectTMEPerson/index.less | 9 +++++++++ .../src/components/SelectTMEPerson/index.tsx | 1 + .../supersonic-fe/src/pages/Agent/AgentForm.tsx | 13 ++++++++++++- 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.less b/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.less index 4067901a5..ec93cc852 100644 --- a/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.less +++ b/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.less @@ -7,3 +7,12 @@ .userText { margin-left: 10px; } + +.selectPerson { + :global { + .ant-select-selection-item { + color: rgba(0, 0, 0, 0.88)!important; + background-color: rgba(0, 0, 0, 0.06)!important; + } + } +} \ No newline at end of file diff --git a/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.tsx b/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.tsx index 7c50c8c91..0a1f5d20b 100644 --- a/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.tsx +++ b/webapp/packages/supersonic-fe/src/components/SelectTMEPerson/index.tsx @@ -37,6 +37,7 @@ const SelectTMEPerson: FC = ({ placeholder, value, isMultiple = true, onC mode={isMultiple ? 'multiple' : undefined} allowClear showSearch + className={styles.selectPerson} onChange={onChange} > {userList.map((item) => { diff --git a/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx b/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx index 6c4c00e60..fa863be21 100644 --- a/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx @@ -8,6 +8,8 @@ import { uuid, jsonParse } from '@/utils/utils'; import ToolsSection from './ToolsSection'; import globalStyles from '@/global.less'; import { QuestionCircleOutlined } from '@ant-design/icons'; +import SelectTMEPerson from '@/components/SelectTMEPerson'; +import FormItemTitle from '@/components/FormHelper/FormItemTitle'; import { getLlmModelTypeList, getLlmModelAppList, getLlmList } from '../../services/system'; import MemorySection from './MemorySection'; @@ -223,7 +225,16 @@ const AgentForm: React.FC = ({ editAgent, onSaveAgent, onCreateToolBtnCli > - + + // } + > + +
{examples.map((example) => { From 46d64d78f3ee8421195fca9c1d5bc381f4571b2c Mon Sep 17 00:00:00 2001 From: tristanliu Date: Fri, 22 Nov 2024 10:24:08 +0800 Subject: [PATCH 10/88] [improvement][headless-fe] Unified the assistant's permission settings interaction to match the system style. --- .../src/pages/Agent/AgentForm.tsx | 12 +++--- .../Datasource/components/ModelFieldForm.tsx | 40 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx b/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx index fa863be21..4139478c5 100644 --- a/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/Agent/AgentForm.tsx @@ -226,15 +226,15 @@ const AgentForm: React.FC = ({ editAgent, onSaveAgent, onCreateToolBtnCli - // } + name="admins" + label="管理员" + // rules={[{ required: true, message: '请设定数据库连接管理者' }]} > + + +
{examples.map((example) => { diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Datasource/components/ModelFieldForm.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Datasource/components/ModelFieldForm.tsx index bf412100e..29d0bdef6 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Datasource/components/ModelFieldForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Datasource/components/ModelFieldForm.tsx @@ -208,26 +208,26 @@ const ModelFieldForm: React.FC = ({ // width: 200, render: (_: any, record: FieldItem) => { const { type } = record; - if (type === EnumDataSourceType.PRIMARY) { - return ( - - { + // onTagObjectChange?.(value); + // }} + // options={tagObjectList.map((item: ISemantic.ITagObjectItem) => { + // return { + // label: item.name, + // value: item.id, + // }; + // })} + // /> + // + // ); + // } if (type === EnumDataSourceType.MEASURES) { const agg = record.expr ? fields.find((field) => field.expr === record.expr)?.agg From 0edadd01eb04dd229f06a26044f10daa62f90279 Mon Sep 17 00:00:00 2001 From: tristanliu Date: Sat, 23 Nov 2024 17:23:16 +0800 Subject: [PATCH 11/88] [improvement][headless-fe] route rebuild stash --- .../packages/supersonic-fe/config/routes.ts | 69 ++++++- .../pages/SemanticModel/Dimension/index.tsx | 12 ++ .../pages/SemanticModel/Dimension/style.less | 0 .../src/pages/SemanticModel/DomainManager.tsx | 108 +++++++++- .../components/MetricInfoCreateForm.tsx | 4 +- .../src/pages/SemanticModel/ModelManager.tsx | 71 +++++++ ...ewContainer.tsx => OverviewContainer1.tsx} | 5 +- .../SemanticModel/OverviewContainerRight.tsx | 62 ++++++ .../View/components/DataSetTable.tsx | 16 +- .../src/pages/SemanticModel/View/index.tsx | 5 +- .../components/ClassMetricTable.tsx | 6 +- .../components/DomainManagerTab.tsx | 92 ++------- .../components/MetricInfoCreateForm.tsx | 4 +- .../components/ModelManagerTab.tsx | 102 ++++++++++ .../components/ModelMetric/index.tsx | 12 ++ .../components/ModelMetric/style.less | 0 .../SemanticModel/components/ModelTable.tsx | 151 +++++++------- .../components/TableColumnRender.tsx | 9 +- .../src/pages/SemanticModel/index.tsx | 190 +++++++++++++++++- 19 files changed, 735 insertions(+), 183 deletions(-) create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/Dimension/index.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/Dimension/style.less create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx rename webapp/packages/supersonic-fe/src/pages/SemanticModel/{OverviewContainer.tsx => OverviewContainer1.tsx} (97%) create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainerRight.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelMetric/index.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelMetric/style.less diff --git a/webapp/packages/supersonic-fe/config/routes.ts b/webapp/packages/supersonic-fe/config/routes.ts index 0326eabf3..946e72eca 100644 --- a/webapp/packages/supersonic-fe/config/routes.ts +++ b/webapp/packages/supersonic-fe/config/routes.ts @@ -57,24 +57,81 @@ const ROUTES = [ }, { path: '/model/', - component: './SemanticModel/DomainManager', + component: './SemanticModel/', name: 'semanticModel', envEnableList: [ENV_KEY.SEMANTIC], routes: [ { - path: '/model/:domainId/:modelId', + 
path: '/model/:domainId', component: './SemanticModel/DomainManager', - // name: 'semanticModel', envEnableList: [ENV_KEY.SEMANTIC], + routes: [ + { + path: '/model/:domainId/:menuKey', + component: './SemanticModel/DomainManager', + }, + ], }, { - path: '/model/:domainId/:modelId/:menuKey', - component: './SemanticModel/DomainManager', - // name: 'semanticModel', + path: '/model/manager/:domainId/:modelId', + component: './SemanticModel/ModelManager', envEnableList: [ENV_KEY.SEMANTIC], + routes: [ + { + path: '/model/manager/:domainId/:modelId/:menuKey', + component: './SemanticModel/ModelManager', + }, + ], }, + // { + // path: '/model/:domainId/:modelId/:menuKey', + // component: './SemanticModel/DomainManager', + // envEnableList: [ENV_KEY.SEMANTIC], + // }, + // { + // path: '/model/:domainId/:modelId/metric', + // component: './SemanticModel/components/ModelMetric', + // envEnableList: [ENV_KEY.SEMANTIC], + // routes: [ + // { + // path: '/model/:domainId/:modelId/metric/list', + // component: './SemanticModel/components/ClassMetricTable', + // envEnableList: [ENV_KEY.SEMANTIC], + // }, + // ], + // }, ], }, + // { + // path: '/model/', + // component: './SemanticModel/DomainManager', + // name: 'semanticModel', + // envEnableList: [ENV_KEY.SEMANTIC], + // routes: [ + // { + // path: '/model/:domainId/:modelId', + // component: './SemanticModel/DomainManager', + // envEnableList: [ENV_KEY.SEMANTIC], + // }, + // { + // path: '/model/:domainId/:modelId/:menuKey', + // component: './SemanticModel/DomainManager', + // envEnableList: [ENV_KEY.SEMANTIC], + // }, + // { + // path: '/model/:domainId/:modelId/metric', + // component: './SemanticModel/components/ModelMetric', + // envEnableList: [ENV_KEY.SEMANTIC], + // routes: [ + // { + // path: '/model/:domainId/:modelId/metric/list', + // component: './SemanticModel/components/ClassMetricTable', + // envEnableList: [ENV_KEY.SEMANTIC], + // }, + // ], + // }, + // ], + // }, // { // path: '/model/:domainId/:modelId/:menuKey', diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Dimension/index.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Dimension/index.tsx new file mode 100644 index 000000000..81dc60372 --- /dev/null +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Dimension/index.tsx @@ -0,0 +1,12 @@ +import React from 'react'; +import { Outlet } from '@umijs/max'; + +const Dimension: React.FC = () => { + return ( + <> + + + ); +}; + +export default Dimension; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Dimension/style.less b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Dimension/style.less new file mode 100644 index 000000000..e69de29bb diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx index 6b6106d62..4ff772a5d 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx @@ -1,12 +1,108 @@ -import React from 'react'; -import OverviewContainer from './OverviewContainer'; +import { message } from 'antd'; +import React, { useEffect, useState } from 'react'; +import { history, useParams, useModel } from '@umijs/max'; +import { ISemantic } from './data'; +import { getDomainList, getDataSetList } from './service'; +import DomainManagerTab from './components/DomainManagerTab'; +import { isArrayOfValues } from '@/utils/utils'; type Props = {}; -const DomainManager: 
React.FC = () => { + +const DomainManager: React.FC = ({}) => { + const defaultTabKey = 'overview'; + const params: any = useParams(); + const domainId = params.domainId; + const domainModel = useModel('SemanticModel.domainData'); + const modelModel = useModel('SemanticModel.modelData'); + const databaseModel = useModel('SemanticModel.databaseData'); + const { selectDomainId, domainList, setSelectDomain, setDomainList } = domainModel; + const { selectModelId } = modelModel; + const { MrefreshDatabaseList } = databaseModel; + const menuKey = params.menuKey ? params.menuKey : defaultTabKey; + const [collapsedState, setCollapsedState] = useState(true); + const [activeKey, setActiveKey] = useState(menuKey); + const [dataSetList, setDataSetList] = useState([]); + + // const initSelectedDomain = (domainList: ISemantic.IDomainItem[]) => { + // const targetNode = domainList.filter((item: any) => { + // return `${item.id}` === domainId; + // })[0]; + // if (!targetNode) { + // const firstRootNode = domainList.filter((item: any) => { + // return item.parentId === 0; + // })[0]; + // if (firstRootNode) { + // const { id } = firstRootNode; + // setSelectDomain(firstRootNode); + // setActiveKey(menuKey); + // pushUrlMenu(id, 0, menuKey); + // } + // } else { + // setSelectDomain(targetNode); + // } + // }; + + // const initProjectTree = async () => { + // const { code, data, msg } = await getDomainList(); + // if (code === 200) { + // initSelectedDomain(data); + // setDomainList(data); + // } else { + // message.error(msg); + // } + // }; + + // useEffect(() => { + // initProjectTree(); + // MrefreshDatabaseList(); + // }, []); + + // useEffect(() => { + // if (!selectDomainId) { + // return; + // } + // // queryModelList(); + // queryDataSetList(); + // }, [selectDomainId]); + + // const queryDataSetList = async () => { + // const { code, data, msg } = await getDataSetList(selectDomainId); + // if (code === 200) { + // setDataSetList(data); + // if (!isArrayOfValues(data)) { + // setActiveKey(defaultTabKey); + // } + // } else { + // message.error(msg); + // } + // }; + + const pushUrlMenu = (domainId: number, menuKey: string) => { + history.push(`/model/${domainId}/${menuKey}`); + }; + + const cleanModelInfo = (domainId) => { + setActiveKey(defaultTabKey); + pushUrlMenu(domainId, defaultTabKey); + // setSelectModel(undefined); + }; + + // const handleCollapsedBtn = () => { + // setCollapsedState(!collapsedState); + // }; + return ( - <> - - + { + cleanModelInfo(selectDomainId); + }} + onMenuChange={(menuKey) => { + setActiveKey(menuKey); + pushUrlMenu(selectDomainId, menuKey); + }} + /> ); }; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx index c7c0b6460..2efcc3291 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx @@ -904,7 +904,9 @@ const MetricInfoCreateForm: React.FC = ({ type="primary" key="console" onClick={() => { - history.replace(`/model/${domainId}/${modelId || metricItem?.modelId}/dataSource`); + history.replace( + `/model/manager/${domainId}/${modelId || metricItem?.modelId}/dataSource`, + ); onCancel?.(); }} > diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx new 
file mode 100644 index 000000000..9ec4abd73 --- /dev/null +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx @@ -0,0 +1,71 @@ +import React, { useEffect, useState } from 'react'; +import { history, useParams, useModel } from '@umijs/max'; +import ModelManagerTab from './components/ModelManagerTab'; + +type Props = {}; + +const OverviewContainer: React.FC = ({}) => { + const defaultTabKey = 'overview'; + const params: any = useParams(); + const domainId = params.domainId; + const modelId = params.modelId; + const domainModel = useModel('SemanticModel.domainData'); + const modelModel = useModel('SemanticModel.modelData'); + const dimensionModel = useModel('SemanticModel.dimensionData'); + const metricModel = useModel('SemanticModel.metricData'); + const databaseModel = useModel('SemanticModel.databaseData'); + const { selectDomainId, domainList, setSelectDomain, setDomainList } = domainModel; + const { + selectModelId, + modelList, + MrefreshModelList, + setSelectModel, + setModelTableHistoryParams, + } = modelModel; + const { MrefreshDimensionList } = dimensionModel; + const { MrefreshMetricList } = metricModel; + const { MrefreshDatabaseList } = databaseModel; + const menuKey = params.menuKey ? params.menuKey : !Number(modelId) ? defaultTabKey : ''; + const [activeKey, setActiveKey] = useState(menuKey); + + const initModelConfig = () => { + const currentMenuKey = menuKey === defaultTabKey ? '' : menuKey; + pushUrlMenu(selectDomainId, selectModelId, currentMenuKey); + setActiveKey(currentMenuKey); + }; + + useEffect(() => { + if (!selectModelId) { + return; + } + initModelConfig(); + MrefreshDimensionList({ modelId: selectModelId }); + MrefreshMetricList({ modelId: selectModelId }); + }, [selectModelId]); + + const pushUrlMenu = (domainId: number, modelId: number, menuKey: string) => { + history.push(`/model/manager/${domainId}/${modelId}/${menuKey}`); + }; + + const cleanModelInfo = (domainId) => { + setActiveKey(defaultTabKey); + pushUrlMenu(domainId, 0, defaultTabKey); + setSelectModel(undefined); + }; + + return ( + { + cleanModelInfo(selectDomainId); + }} + onMenuChange={(menuKey) => { + setActiveKey(menuKey); + pushUrlMenu(selectDomainId, selectModelId, menuKey); + }} + /> + ); +}; + +export default OverviewContainer; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer1.tsx similarity index 97% rename from webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx rename to webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer1.tsx index 20786ca24..63534cd48 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer1.tsx @@ -8,14 +8,14 @@ import { ISemantic } from './data'; import { getDomainList, getDataSetList } from './service'; import DomainManagerTab from './components/DomainManagerTab'; import { isArrayOfValues } from '@/utils/utils'; +import OverviewContainerRight from './components/OverviewContainerRight'; type Props = { mode: 'domain'; }; -const OverviewContainer: React.FC = ({ mode }) => { +const OverviewContainer: React.FC = ({ mode = 'domain' }) => { const defaultTabKey = 'overview'; - // 'overview' dataSetManage const params: any = useParams(); const domainId = params.domainId; const modelId = params.modelId; @@ -184,6 +184,7 @@ const OverviewContainer: React.FC = ({ mode }) => {
{selectDomainId ? ( <> + { + const domainModel = useModel('SemanticModel.domainData'); + const modelModel = useModel('SemanticModel.modelData'); + + const { selectDomainId, selectDomainName, selectDomain: domainData } = domainModel; + const { selectModelId, selectModelName, setSelectModel } = modelModel; + + return ( + <> + { + // onBackDomainBtnClick?.(); + setSelectModel(undefined); + history.push(`/model/${selectDomainId}/overview`); + }} + style={ + selectModelName ? { cursor: 'pointer' } : { color: '#296df3', fontWeight: 'bold' } + } + > + + {selectDomainName} + + ), + }, + { + type: 'separator', + separator: selectModelName ? '/' : '', + }, + { + title: selectModelName ? ( + { + history.push(`/model/manager/${selectDomainId}/${selectModelId}/`); + }} + style={{ color: '#296df3' }} + > + + {selectModelName} + + ) : undefined, + }, + ]} + /> + + + ); +}; + +export default OverviewContainerRight; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx index 260c61062..2fea630fd 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx @@ -13,11 +13,11 @@ import { ColumnsConfig } from '../../components/TableColumnRender'; import ViewSearchFormModal from './ViewSearchFormModal'; type Props = { - dataSetList: ISemantic.IDatasetItem[]; + // dataSetList: ISemantic.IDatasetItem[]; disabledEdit?: boolean; }; -const DataSetTable: React.FC = ({ dataSetList, disabledEdit = false }) => { +const DataSetTable: React.FC = ({ disabledEdit = false }) => { const domainModel = useModel('SemanticModel.domainData'); const { selectDomainId } = domainModel; @@ -43,14 +43,14 @@ const DataSetTable: React.FC = ({ dataSetList, disabledEdit = false }) => } }; - const [viewList, setViewList] = useState(dataSetList); + const [viewList, setViewList] = useState(); + + // useEffect(() => { + // setViewList(dataSetList); + // }, [dataSetList]); useEffect(() => { - setViewList(dataSetList); - }, [dataSetList]); - - useEffect(() => { - // queryDataSetList(); + queryDataSetList(); queryDomainAllModel(); }, [selectDomainId]); diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/index.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/index.tsx index 962b9e1f8..f36197f88 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/index.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/index.tsx @@ -4,13 +4,12 @@ import DataSetTable from './components/DataSetTable'; type Props = { disabledEdit?: boolean; - dataSetList: ISemantic.IDatasetItem[]; }; -const View: React.FC = ({ dataSetList, disabledEdit = false }) => { +const View: React.FC = ({ disabledEdit = false }) => { return (
- +
); }; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx index c5da0348f..9e0958a2a 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx @@ -142,7 +142,11 @@ const ClassMetricTable: React.FC = ({ onEmptyMetricData }) => { } }; - const columnsConfig = ColumnsConfig({ indicatorInfo: { url: '/model/metric/edit/' } }); + const columnsConfig = ColumnsConfig({ + indicatorInfo: { + url: '/model/metric/edit/', + }, + }); const columns: ProColumns[] = [ { diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainManagerTab.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainManagerTab.tsx index 9e54e39fd..0825af508 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainManagerTab.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainManagerTab.tsx @@ -11,23 +11,24 @@ import styles from './style.less'; import { HomeOutlined, FundViewOutlined } from '@ant-design/icons'; import { ISemantic } from '../data'; import SemanticGraphCanvas from '../SemanticGraphCanvas'; +import Dimension from '../Dimension'; +import ModelMetric from '../components/ModelMetric'; import View from '../View'; type Props = { - isModel: boolean; + // isModel: boolean; activeKey: string; - modelList: ISemantic.IModelItem[]; + // modelList: ISemantic.IModelItem[]; dataSetList: ISemantic.IDatasetItem[]; - handleModelChange: (model?: ISemantic.IModelItem) => void; + // handleModelChange: (model?: ISemantic.IModelItem) => void; onBackDomainBtnClick?: () => void; onMenuChange?: (menuKey: string) => void; }; const DomainManagerTab: React.FC = ({ - isModel, activeKey, - modelList, + // modelList, dataSetList, - handleModelChange, + // handleModelChange, onBackDomainBtnClick, onMenuChange, }) => { @@ -38,7 +39,7 @@ const DomainManagerTab: React.FC = ({ const modelModel = useModel('SemanticModel.modelData'); const { selectDomainId, selectDomainName, selectDomain: domainData } = domainModel; - const { selectModelId, selectModelName } = modelModel; + const { selectModelId, modelList, selectModelName } = modelModel; useEffect(() => { initState.current = false; @@ -50,7 +51,7 @@ const DomainManagerTab: React.FC = ({ label: '数据集管理', key: 'overview', hidden: !!domainData?.parentId, - children: , + children: , }, { label: '模型管理', @@ -59,9 +60,9 @@ const DomainManagerTab: React.FC = ({ showModelType === 'list' ? ( { - handleModelChange(model); - }} + // onModelChange={(model) => { + // handleModelChange(model); + // }} /> ) : (
@@ -98,36 +99,9 @@ const DomainManagerTab: React.FC = ({ return item.key !== 'permissonSetting'; }); - const isModelItem = [ - { - label: '指标管理', - key: 'metric', - children: ( - { - if (!initState.current) { - initState.current = true; - onMenuChange?.('dimenstion'); - } - }} - /> - ), - }, - { - label: '维度管理', - key: 'dimenstion', - children: , - }, - { - label: '权限管理', - key: 'permissonSetting', - children: , - }, - ]; - const getActiveKey = () => { const key = activeKey || defaultTabKey; - const tabItems = !isModel ? tabItem : isModelItem; + const tabItems = tabItem; const tabItemsKeys = tabItems.map((item) => item.key); if (!tabItemsKeys.includes(key)) { return tabItemsKeys[0]; @@ -137,47 +111,9 @@ const DomainManagerTab: React.FC = ({ return (
- { - onBackDomainBtnClick?.(); - }} - style={ - selectModelName ? { cursor: 'pointer' } : { color: '#296df3', fontWeight: 'bold' } - } - > - - {selectDomainName} - - ), - }, - { - type: 'separator', - separator: selectModelName ? '/' : '', - }, - { - title: selectModelName ? ( - { - history.push(`/model/${selectDomainId}/${selectModelId}/`); - }} - style={{ color: '#296df3' }} - > - - {selectModelName} - - ) : undefined, - }, - ]} - /> = ({ type="primary" key="console" onClick={() => { - history.replace(`/model/${domainId}/${modelId || metricItem?.modelId}/dataSource`); + history.replace( + `/model/manager/${domainId}/${modelId || metricItem?.modelId}/dataSource`, + ); onCancel?.(); }} > diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx new file mode 100644 index 000000000..dcd8749eb --- /dev/null +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx @@ -0,0 +1,102 @@ +import { Tabs, Breadcrumb, Space, Radio } from 'antd'; +import React, { useRef, useEffect, useState } from 'react'; +import { history, useModel } from '@umijs/max'; +import ClassDimensionTable from './ClassDimensionTable'; +import ClassMetricTable from './ClassMetricTable'; +import PermissionSection from './Permission/PermissionSection'; +import TagObjectTable from '../Insights/components/TagObjectTable'; +import TermTable from '../components/Term/TermTable'; +import OverView from './OverView'; +import styles from './style.less'; +import { HomeOutlined, FundViewOutlined } from '@ant-design/icons'; +import { ISemantic } from '../data'; +import SemanticGraphCanvas from '../SemanticGraphCanvas'; +import Dimension from '../Dimension'; +import ModelMetric from '../components/ModelMetric'; +import View from '../View'; + +type Props = { + activeKey: string; + modelList: ISemantic.IModelItem[]; + handleModelChange: (model?: ISemantic.IModelItem) => void; + onBackDomainBtnClick?: () => void; + onMenuChange?: (menuKey: string) => void; +}; +const ModelManagerTab: React.FC = ({ + activeKey, + modelList, + handleModelChange, + onBackDomainBtnClick, + onMenuChange, +}) => { + const initState = useRef(false); + const defaultTabKey = 'metric'; + const domainModel = useModel('SemanticModel.domainData'); + const modelModel = useModel('SemanticModel.modelData'); + + const { selectDomainId, selectDomainName, selectDomain: domainData } = domainModel; + const { selectModelId, selectModelName } = modelModel; + + useEffect(() => { + console.log(modelList, 'modelList'); + }, [modelList]); + + useEffect(() => { + initState.current = false; + }, [selectModelId]); + + const isModelItem = [ + { + label: '指标管理', + key: 'metric', + // children: , + children: ( + { + if (!initState.current) { + initState.current = true; + onMenuChange?.('dimension'); + } + }} + /> + ), + }, + { + label: '维度管理', + key: 'dimension', + children: , + // children: , + }, + { + label: '权限管理', + key: 'permissonSetting', + children: , + }, + ]; + + const getActiveKey = () => { + const key = activeKey || defaultTabKey; + const tabItems = isModelItem; + const tabItemsKeys = tabItems.map((item) => item.key); + if (!tabItemsKeys.includes(key)) { + return tabItemsKeys[0]; + } + return key; + }; + + return ( +
+ { + onMenuChange?.(menuKey); + }} + /> +
+ ); +}; + +export default ModelManagerTab; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelMetric/index.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelMetric/index.tsx new file mode 100644 index 000000000..81dc60372 --- /dev/null +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelMetric/index.tsx @@ -0,0 +1,12 @@ +import React from 'react'; +import { Outlet } from '@umijs/max'; + +const Dimension: React.FC = () => { + return ( + <> + + + ); +}; + +export default Dimension; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelMetric/style.less b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelMetric/style.less new file mode 100644 index 000000000..e69de29bb diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx index 1ecd296da..ecd69002f 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx @@ -3,7 +3,7 @@ import { ProTable } from '@ant-design/pro-components'; import { message, Button, Space, Popconfirm, Input } from 'antd'; import React, { useRef, useState, useEffect } from 'react'; import { StatusEnum } from '../enum'; -import { useModel } from '@umijs/max'; +import { useModel, history } from '@umijs/max'; import { deleteModel, batchUpdateModelStatus } from '../service'; import ClassModelTypeModal from './ClassModelTypeModal'; import { ColumnsConfig } from './TableColumnRender'; @@ -22,14 +22,14 @@ const ModelTable: React.FC = ({ modelList, disabledEdit = false, onModelC const domainModel = useModel('SemanticModel.domainData'); const modelModel = useModel('SemanticModel.modelData'); const { selectDomainId } = domainModel; - const { modelTableHistoryParams, setModelTableHistoryParams } = modelModel; + const { modelTableHistoryParams, setModelTableHistoryParams, setSelectModel } = modelModel; const [modelItem, setModelItem] = useState(); const [filterParams, setFilterParams] = useState>({}); const [createDataSourceModalOpen, setCreateDataSourceModalOpen] = useState(false); const [currentPageNumber, setCurrentPageNumber] = useState(1); const actionRef = useRef(); - + const [isEditing, setIsEditing] = useState(false); const [tableData, setTableData] = useState([]); const params = modelTableHistoryParams?.[selectDomainId]; @@ -100,10 +100,14 @@ const ModelTable: React.FC = ({ modelList, disabledEdit = false, onModelC title: '模型名称', search: false, render: (_, record) => { + const { domainId, id } = record; return ( { - onModelChange?.(record); + setSelectModel(record); + + history.push(`/model/manager/${domainId}/${id}`); + // onModelChange?.(record); }} > {_} @@ -161,6 +165,7 @@ const ModelTable: React.FC = ({ modelList, disabledEdit = false, onModelC onClick={() => { setModelItem(record); setCreateDataSourceModalOpen(true); + setIsEditing(true); }} > 编辑 @@ -209,84 +214,88 @@ const ModelTable: React.FC = ({ modelList, disabledEdit = false, onModelC return ( <> - { - return false; - }} - pagination={{ - current: currentPageNumber, - onChange: (pageNumber) => { - setCurrentPageNumber(pageNumber); - dipatchParams({ - ...filterParams, - pageNumber: `${pageNumber}`, - }); - }, - }} - headerTitle={ - { - setCurrentPageNumber(1); - dipatchParams({ - ...filterParams, - key: value, - pageNumber: `1`, - }); - 
setFilterParams((preState) => { - return { - ...preState, +
+ { + return false; + }} + pagination={{ + current: currentPageNumber, + onChange: (pageNumber) => { + setCurrentPageNumber(pageNumber); + dipatchParams({ + ...filterParams, + pageNumber: `${pageNumber}`, + }); + }, + }} + headerTitle={ + { + setCurrentPageNumber(1); + dipatchParams({ + ...filterParams, key: value, - }; - }); + pageNumber: `1`, + }); + setFilterParams((preState) => { + return { + ...preState, + key: value, + }; + }); + }} + /> + ), + }, + ]} + /> + } + size="small" + options={{ reload: false, density: false, fullScreen: false }} + toolBarRender={() => + disabledEdit + ? [<>] + : [ + , - ] - } - /> + > + 创建模型 + , + ] + } + /> +
{createDataSourceModalOpen && ( { onModelChange?.(); + setIsEditing(false); setCreateDataSourceModalOpen(false); }} onCancel={() => { + setIsEditing(false); setCreateDataSourceModalOpen(false); }} /> diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx index f592344c4..c24b816c3 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx @@ -16,6 +16,7 @@ export const ColumnsConfig: any = (params?: { indicatorInfo?: { url?: string; starType?: StarType; + onNameClick?: (record: ISemantic.IMetricItem) => void; }; }) => { return { @@ -117,11 +118,15 @@ export const ColumnsConfig: any = (params?: { className={styles.textLink} style={{ fontWeight: 500 }} onClick={(event: any) => { - history.push(`${url}${id}`); + if (params?.indicatorInfo?.onNameClick) { + params?.indicatorInfo?.onNameClick(record); + } else { + history.push(`${url}${id}`); + } event.preventDefault(); event.stopPropagation(); }} - href={`/webapp${url}${id}`} + // href={`/webapp${url}${id}`} > {name}
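The TableColumnRender hunk above makes the metric-name cell pluggable: a caller can pass onNameClick through indicatorInfo to handle the click in place instead of the default history.push to the edit page. A minimal consumer sketch, assuming a drawer-style handler that is not part of this diff:

    // Sketch: consuming the new optional onNameClick hook of ColumnsConfig.
    // openMetricDrawer is a hypothetical in-page handler, not code from this patch.
    import { ColumnsConfig } from './TableColumnRender';
    import { ISemantic } from '../data';

    const openMetricDrawer = (metric: ISemantic.IMetricItem) => {
      // e.g. keep the user on the current route and show details in a drawer
      console.log('open metric drawer', metric.id);
    };

    const columnsConfig = ColumnsConfig({
      indicatorInfo: {
        url: '/model/metric/edit/', // fallback used when no handler is supplied
        onNameClick: (record: ISemantic.IMetricItem) => openMetricDrawer(record),
      },
    });
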
diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx index 01f7068a0..7120a1482 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx @@ -1,7 +1,189 @@ -import React from 'react'; +import { message } from 'antd'; +import React, { useEffect, useState } from 'react'; +import { history, useParams, useModel } from '@umijs/max'; +import DomainListTree from './components/DomainList'; +import styles from './components/style.less'; +import { LeftOutlined, RightOutlined } from '@ant-design/icons'; +import { ISemantic } from './data'; +import { getDomainList, getDataSetList } from './service'; +import { isArrayOfValues } from '@/utils/utils'; +import OverviewContainerRight from './OverviewContainerRight'; -const classManager: React.FC = ({ children }) => { - return
{children}
; +type Props = { + mode: 'domain'; }; -export default classManager; +const OverviewContainer: React.FC = ({ mode = 'domain' }) => { + const defaultTabKey = 'overview'; + const params: any = useParams(); + const domainId = params.domainId; + const modelId = params.modelId; + const domainModel = useModel('SemanticModel.domainData'); + const modelModel = useModel('SemanticModel.modelData'); + const dimensionModel = useModel('SemanticModel.dimensionData'); + const metricModel = useModel('SemanticModel.metricData'); + const databaseModel = useModel('SemanticModel.databaseData'); + const { selectDomainId, domainList, setSelectDomain, setDomainList } = domainModel; + const { + selectModelId, + modelList, + MrefreshModelList, + setSelectModel, + setModelTableHistoryParams, + } = modelModel; + const { MrefreshDimensionList } = dimensionModel; + const { MrefreshMetricList } = metricModel; + const { MrefreshDatabaseList } = databaseModel; + const menuKey = params.menuKey ? params.menuKey : !Number(modelId) ? defaultTabKey : ''; + + const [collapsedState, setCollapsedState] = useState(true); + const [activeKey, setActiveKey] = useState(menuKey); + // const [dataSetList, setDataSetList] = useState([]); + + const initSelectedDomain = (domainList: ISemantic.IDomainItem[]) => { + const targetNode = domainList.filter((item: any) => { + return `${item.id}` === domainId; + })[0]; + if (!targetNode) { + const firstRootNode = domainList.filter((item: any) => { + return item.parentId === 0; + })[0]; + if (firstRootNode) { + const { id } = firstRootNode; + setSelectDomain(firstRootNode); + setActiveKey(menuKey); + pushUrlMenu(id, 0, menuKey); + } + } else { + setSelectDomain(targetNode); + } + }; + + const initProjectTree = async () => { + const { code, data, msg } = await getDomainList(); + if (code === 200) { + initSelectedDomain(data); + setDomainList(data); + } else { + message.error(msg); + } + }; + + useEffect(() => { + initProjectTree(); + MrefreshDatabaseList(); + return () => { + setSelectDomain(undefined); + // setSelectModel(undefined); + }; + }, []); + + useEffect(() => { + if (!selectDomainId) { + return; + } + console.log(selectDomainId, 'selectDomainIdselectDomainId'); + queryModelList(); + // queryDataSetList(); + }, [selectDomainId]); + + // const queryDataSetList = async () => { + // const { code, data, msg } = await getDataSetList(selectDomainId); + // if (code === 200) { + // setDataSetList(data); + // if (!isArrayOfValues(data)) { + // setActiveKey(defaultTabKey); + // } + // } else { + // message.error(msg); + // } + // }; + + const queryModelList = async () => { + await MrefreshModelList(selectDomainId); + }; + + // const initModelConfig = () => { + // const currentMenuKey = menuKey === defaultTabKey ? 
'' : menuKey; + // pushUrlMenu(selectDomainId, selectModelId, currentMenuKey); + // setActiveKey(currentMenuKey); + // }; + + // useEffect(() => { + // if (!selectModelId) { + // return; + // } + // // initModelConfig(); + // MrefreshDimensionList({ modelId: selectModelId }); + // MrefreshMetricList({ modelId: selectModelId }); + // }, [selectModelId]); + + const pushUrlMenu = (domainId: number, modelId: number, menuKey: string) => { + history.push(`/model/${domainId}/${menuKey}`); + }; + + // // const handleModelChange = (model?: ISemantic.IModelItem) => { + // // if (!model) { + // // return; + // // } + // // if (`${model.id}` === `${selectModelId}`) { + // // initModelConfig(); + // // } + // // setSelectModel(model); + // // }; + + const cleanModelInfo = (domainId) => { + setActiveKey(defaultTabKey); + pushUrlMenu(domainId, 0, defaultTabKey); + setSelectModel(undefined); + }; + + const handleCollapsedBtn = () => { + setCollapsedState(!collapsedState); + }; + + return ( +
+
+
+
+ { + const { id } = domainData; + cleanModelInfo(id); + setSelectDomain(domainData); + setModelTableHistoryParams({ + [id]: {}, + }); + }} + onTreeDataUpdate={() => { + initProjectTree(); + }} + /> +
+ +
{ + handleCollapsedBtn(); + }} + > + {collapsedState ? : } +
+
+
+ {selectDomainId ? ( + <> + + + ) : ( +

请选择项目

+ )} +
+
+
+ ); +}; + +export default OverviewContainer; From 296ce5cc55fbb4ff88ae968f87609a5aa8f91a17 Mon Sep 17 00:00:00 2001 From: lxwcodemonkey Date: Sat, 16 Nov 2024 21:44:50 +0800 Subject: [PATCH 12/88] [improvement][Chat] Support agent permission management #1143 --- .../java/com/tencent/supersonic/headless/SchemaAuthTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java index e93e5bcb6..661262d50 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/SchemaAuthTest.java @@ -50,7 +50,8 @@ public class SchemaAuthTest extends BaseTest { @Test public void test_getVisibleModelList_alice() { User user = DataUtils.getUserAlice(); - List modelResps = modelService.getModelListWithAuth(user, null, AuthType.VIEWER); + List modelResps = + modelService.getModelListWithAuth(user, null, AuthType.VIEWER); List expectedModelBizNames = Lists.newArrayList("user_department", "singer"); Assertions.assertEquals(expectedModelBizNames, modelResps.stream().map(ModelResp::getBizName).collect(Collectors.toList())); From c22e3ef2e8f240907c9c6bad3de2b29f28181c5f Mon Sep 17 00:00:00 2001 From: daikon12 <1059907724@qq.com> Date: Sun, 24 Nov 2024 16:20:57 +0800 Subject: [PATCH 13/88] [improvement](Dict)Support returns dict task list of dimensions by page --- .../supersonic/common/util/DateUtils.java | 8 +++++++ .../api/pojo/request/ValueTaskQueryReq.java | 23 +++++++++++++++++++ .../repository/DictRepository.java | 3 +++ .../repository/impl/DictRepositoryImpl.java | 21 +++++++++++++++++ .../server/rest/KnowledgeController.java | 13 +++++++++++ .../server/service/DictTaskService.java | 3 +++ .../service/impl/DictTaskServiceImpl.java | 23 ++++++++++++++++--- .../headless/server/utils/DictUtils.java | 9 ++++++++ 8 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ValueTaskQueryReq.java diff --git a/common/src/main/java/com/tencent/supersonic/common/util/DateUtils.java b/common/src/main/java/com/tencent/supersonic/common/util/DateUtils.java index 1b2007ed9..5ab669aa4 100644 --- a/common/src/main/java/com/tencent/supersonic/common/util/DateUtils.java +++ b/common/src/main/java/com/tencent/supersonic/common/util/DateUtils.java @@ -16,6 +16,7 @@ import java.time.temporal.TemporalAdjuster; import java.time.temporal.TemporalAdjusters; import java.util.ArrayList; import java.util.Arrays; +import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Objects; @@ -201,6 +202,13 @@ public class DateUtils { return false; } + public static Long calculateDiffMs(Date createAt) { + Calendar calendar = Calendar.getInstance(); + Date now = calendar.getTime(); + long milliseconds = now.getTime() - createAt.getTime(); + return milliseconds; + } + public static boolean isDateString(String value, String format) { try { DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format); diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ValueTaskQueryReq.java b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ValueTaskQueryReq.java new file mode 100644 index 000000000..f425ff261 --- /dev/null +++ 
b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ValueTaskQueryReq.java @@ -0,0 +1,23 @@ +package com.tencent.supersonic.headless.api.pojo.request; + +import javax.validation.constraints.NotNull; + +import com.tencent.supersonic.common.pojo.PageBaseReq; +import lombok.Data; + +import java.util.List; + +/** + * @author: kanedai + * @date: 2024/11/24 + */ +@Data +public class ValueTaskQueryReq extends PageBaseReq { + + @NotNull + private Long itemId; + + private List taskStatusList; + + private String key; +} diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/DictRepository.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/DictRepository.java index 37008166c..66adedbb0 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/DictRepository.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/DictRepository.java @@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.server.persistence.repository; import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; +import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.server.persistence.dataobject.DictConfDO; @@ -26,4 +27,6 @@ public interface DictRepository { DictTaskDO queryDictTaskById(Long id); DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq); + + List queryAllDictTask(ValueTaskQueryReq taskQueryReq); } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/DictRepositoryImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/DictRepositoryImpl.java index 1abaa39eb..55ffe1d11 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/DictRepositoryImpl.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/DictRepositoryImpl.java @@ -4,6 +4,7 @@ import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.tencent.supersonic.common.pojo.enums.TypeEnums; import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; +import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DimensionResp; @@ -14,11 +15,14 @@ import com.tencent.supersonic.headless.server.persistence.mapper.DictTaskMapper; import com.tencent.supersonic.headless.server.persistence.repository.DictRepository; import com.tencent.supersonic.headless.server.service.DimensionService; import com.tencent.supersonic.headless.server.utils.DictUtils; +import com.xkzhangsan.time.utils.CollectionUtil; import lombok.extern.slf4j.Slf4j; +import org.codehaus.plexus.util.StringUtils; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Repository; import org.springframework.util.CollectionUtils; +import 
java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Objects; @@ -100,6 +104,23 @@ public class DictRepositoryImpl implements DictRepository { return taskResp; } + @Override + public List queryAllDictTask(ValueTaskQueryReq taskQueryReq) { + QueryWrapper wrapper = new QueryWrapper<>(); + if (Objects.nonNull(taskQueryReq.getItemId())) { + wrapper.lambda().eq(DictTaskDO::getItemId, taskQueryReq.getItemId()); + } + if (CollectionUtil.isNotEmpty(taskQueryReq.getTaskStatusList())) { + wrapper.lambda().in(DictTaskDO::getStatus, taskQueryReq.getTaskStatusList()); + } + if (StringUtils.isNotEmpty(taskQueryReq.getKey())) { + String key = taskQueryReq.getKey(); + wrapper.lambda().and(qw -> qw.like(DictTaskDO::getName, key).or() + .like(DictTaskDO::getDescription, key).or().like(DictTaskDO::getConfig, key)); + } + return dictTaskMapper.selectList(wrapper); + } + @Override public Long addDictConf(DictConfDO dictConfDO) { dictConfMapper.insert(dictConfDO); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/rest/KnowledgeController.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/rest/KnowledgeController.java index 7ce9e6d4c..e813e496b 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/rest/KnowledgeController.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/rest/KnowledgeController.java @@ -13,6 +13,7 @@ import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter; import com.tencent.supersonic.headless.api.pojo.request.DictItemReq; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictValueReq; +import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp; @@ -132,6 +133,18 @@ public class KnowledgeController { return taskService.queryLatestDictTask(taskReq, user); } + /** + * queryDictTask-分页返回维度的字典任务列表 + * + * @param taskQueryReq + */ + @PostMapping("/task/search/page") + public PageInfo queryDictTask(@RequestBody ValueTaskQueryReq taskQueryReq, + HttpServletRequest request, HttpServletResponse response) { + User user = UserHolder.findUser(request, response); + return taskService.queryDictTask(taskQueryReq, user); + } + @GetMapping("/embedding/reload") public Object reloadEmbedding() { metaEmbeddingTask.reloadMetaEmbedding(); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/DictTaskService.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/DictTaskService.java index 9e2bee6cb..6e66641ac 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/DictTaskService.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/DictTaskService.java @@ -4,6 +4,7 @@ import com.github.pagehelper.PageInfo; import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictValueReq; +import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import 
com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp; @@ -17,6 +18,8 @@ public interface DictTaskService { DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq, User user); + PageInfo queryDictTask(ValueTaskQueryReq taskQueryReq, User user); + PageInfo queryDictValue(DictValueReq dictValueReq, User user); String queryDictFilePath(DictValueReq dictValueReq, User user); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DictTaskServiceImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DictTaskServiceImpl.java index d83e8e2de..da70b7de0 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DictTaskServiceImpl.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/DictTaskServiceImpl.java @@ -1,15 +1,18 @@ package com.tencent.supersonic.headless.server.service.impl; +import com.github.pagehelper.PageHelper; import com.github.pagehelper.PageInfo; import com.tencent.supersonic.common.pojo.Constants; import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.enums.StatusEnum; import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum; import com.tencent.supersonic.common.util.BeanMapper; +import com.tencent.supersonic.common.util.DateUtils; import com.tencent.supersonic.headless.api.pojo.DimValueMap; import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictValueReq; +import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp; @@ -116,14 +119,17 @@ public class DictTaskServiceImpl implements DictTaskService { fileHandler.writeFile(data, fileName, false); // 3.Change in-memory dictionary data in real time + String status = TaskStatusEnum.SUCCESS.getStatus(); try { dictWordService.loadDictWord(); - - dictTaskDO.setStatus(TaskStatusEnum.SUCCESS.getStatus()); - dictRepository.editDictTask(dictTaskDO); } catch (Exception e) { log.error("reloadCustomDictionary error", e); + status = TaskStatusEnum.ERROR.getStatus(); + dictTaskDO.setDescription(e.toString()); } + dictTaskDO.setStatus(status); + dictTaskDO.setElapsedMs(DateUtils.calculateDiffMs(dictTaskDO.getCreatedAt())); + dictRepository.editDictTask(dictTaskDO); } @Override @@ -164,6 +170,17 @@ public class DictTaskServiceImpl implements DictTaskService { return dictRepository.queryLatestDictTask(taskReq); } + @Override + public PageInfo queryDictTask(ValueTaskQueryReq taskQueryReq, User user) { + PageInfo dictTaskDOPageInfo = + PageHelper.startPage(taskQueryReq.getCurrent(), taskQueryReq.getPageSize()) + .doSelectPageInfo(() -> dictRepository.queryAllDictTask(taskQueryReq)); + PageInfo dictTaskRespPageInfo = new PageInfo<>(); + BeanMapper.mapper(dictTaskDOPageInfo, dictTaskRespPageInfo); + dictTaskRespPageInfo.setList(dictConverter.taskDO2Resp(dictTaskDOPageInfo.getList())); + return dictTaskRespPageInfo; + } + @Override public PageInfo queryDictValue(DictValueReq dictValueReq, User user) { // todo 优化读取内存结构 diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DictUtils.java 
b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DictUtils.java index aa3fb47bd..e3d76bf97 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DictUtils.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DictUtils.java @@ -34,6 +34,7 @@ import com.tencent.supersonic.headless.server.service.DimensionService; import com.tencent.supersonic.headless.server.service.MetricService; import com.tencent.supersonic.headless.server.service.ModelService; import com.tencent.supersonic.headless.server.service.TagMetaService; +import com.xkzhangsan.time.utils.CollectionUtil; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.BeanUtils; @@ -533,4 +534,12 @@ public class DictUtils { resp.setConfig(JsonUtil.toObject(dictTaskDO.getConfig(), ItemValueConfig.class)); return resp; } + + public List taskDO2Resp(List dictTaskDOList) { + List dictTaskRespList = new ArrayList<>(); + if (CollectionUtil.isNotEmpty(dictTaskDOList)) { + dictTaskDOList.stream().forEach(taskDO -> dictTaskRespList.add(taskDO2Resp(taskDO))); + } + return dictTaskRespList; + } } From 860fd5d299a8fcfba2583bc6091041a650fb81ed Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Sun, 24 Nov 2024 19:07:56 +0800 Subject: [PATCH 14/88] [improvement][headless]Clean code logic of headless translator. --- .../common/jsqlparser/SqlReplaceHelper.java | 8 +- .../supersonic/common/util/DateModeUtils.java | 12 +- .../jsqlparser/SqlReplaceHelperTest.java | 4 +- .../api/pojo/request/ParseSqlReq.java | 24 - .../api/pojo/request/QueryFilter.java | 3 +- .../headless/core/pojo/DataSetQueryParam.java | 14 - .../headless/core/pojo/MetricQueryParam.java | 17 - .../headless/core/pojo/QueryStatement.java | 22 +- .../headless/core/pojo/SqlQueryParam.java | 12 + .../headless/core/pojo/StructQueryParam.java} | 21 +- .../translator/DefaultSemanticTranslator.java | 524 ++---------------- .../core/translator/DetailQueryOptimizer.java | 30 +- .../headless/core/translator/QueryParser.java | 3 +- .../calcite/CalciteQueryParser.java | 5 +- .../calcite/s2sql/OntologyQueryParam.java} | 10 +- .../translator/calcite/sql/SqlBuilder.java | 62 +-- .../calcite/sql/node/DataModelNode.java | 12 +- .../calcite/sql/render/FilterRender.java | 4 +- .../calcite/sql/render/JoinRender.java | 4 +- .../calcite/sql/render/OutputRender.java | 4 +- .../calcite/sql/render/Renderer.java | 4 +- .../calcite/sql/render/SourceRender.java | 14 +- .../converter/DefaultDimValueConverter.java | 15 +- ...nverter.java => MetricRatioConverter.java} | 291 +++++----- .../converter/ParserDefaultConverter.java | 74 --- .../converter/SqlQueryConverter.java | 308 ++++++++++ ...nverter.java => SqlVariableConverter.java} | 8 +- .../converter/StructQueryConverter.java | 74 +++ .../headless/core/utils/SqlGenerateUtils.java | 67 +-- .../aggregate/CalciteSqlParserTest.java | 3 +- .../service/impl/S2SemanticLayerService.java | 73 ++- .../server/utils/MetricDrillDownChecker.java | 3 +- .../calcite/HeadlessParserServiceTest.java | 23 +- .../server/utils/QueryNLReqBuilderTest.java | 3 - .../main/resources/META-INF/spring.factories | 10 +- .../supersonic/demo/S2CompanyDemo.java | 25 +- .../tencent/supersonic/demo/S2VisitsDemo.java | 3 +- .../main/resources/META-INF/spring.factories | 7 +- .../com/tencent/supersonic/chat/BaseTest.java | 1 - .../tencent/supersonic/chat/DetailTest.java | 6 +- .../tencent/supersonic/chat/MetricTest.java | 33 +- 
.../headless/QueryByMetricTest.java | 1 + .../headless/QueryByStructTest.java | 2 + .../tencent/supersonic/util/DataUtils.java | 8 +- .../src/test/resources/s2-config.yaml | 2 +- 45 files changed, 795 insertions(+), 1058 deletions(-) delete mode 100644 headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ParseSqlReq.java delete mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/DataSetQueryParam.java delete mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/MetricQueryParam.java create mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/SqlQueryParam.java rename headless/{api/src/main/java/com/tencent/supersonic/headless/api/pojo/QueryParam.java => core/src/main/java/com/tencent/supersonic/headless/core/pojo/StructQueryParam.java} (57%) rename headless/{api/src/main/java/com/tencent/supersonic/headless/api/pojo/MetricTable.java => core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java} (57%) rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/{CalculateAggConverter.java => MetricRatioConverter.java} (50%) delete mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java create mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java rename headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/{SqlVariableParseConverter.java => SqlVariableConverter.java} (86%) create mode 100644 headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java diff --git a/common/src/main/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelper.java b/common/src/main/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelper.java index 6604f2487..d5443c328 100644 --- a/common/src/main/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelper.java +++ b/common/src/main/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelper.java @@ -528,7 +528,7 @@ public class SqlReplaceHelper { } } - private static Select replaceAggAliasOrderItem(Select selectStatement) { + private static Select replaceAggAliasOrderbyField(Select selectStatement) { if (selectStatement instanceof PlainSelect) { PlainSelect plainSelect = (PlainSelect) selectStatement; if (Objects.nonNull(plainSelect.getOrderByElements())) { @@ -564,15 +564,15 @@ public class SqlReplaceHelper { if (plainSelect.getFromItem() instanceof ParenthesedSelect) { ParenthesedSelect parenthesedSelect = (ParenthesedSelect) plainSelect.getFromItem(); parenthesedSelect - .setSelect(replaceAggAliasOrderItem(parenthesedSelect.getSelect())); + .setSelect(replaceAggAliasOrderbyField(parenthesedSelect.getSelect())); } return selectStatement; } return selectStatement; } - public static String replaceAggAliasOrderItem(String sql) { - Select selectStatement = replaceAggAliasOrderItem(SqlSelectHelper.getSelect(sql)); + public static String replaceAggAliasOrderbyField(String sql) { + Select selectStatement = replaceAggAliasOrderbyField(SqlSelectHelper.getSelect(sql)); return selectStatement.toString(); } diff --git a/common/src/main/java/com/tencent/supersonic/common/util/DateModeUtils.java b/common/src/main/java/com/tencent/supersonic/common/util/DateModeUtils.java index a5753c085..75ee5a597 100644 --- 
a/common/src/main/java/com/tencent/supersonic/common/util/DateModeUtils.java +++ b/common/src/main/java/com/tencent/supersonic/common/util/DateModeUtils.java @@ -4,6 +4,7 @@ import com.tencent.supersonic.common.pojo.Constants; import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.ItemDateResp; import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum; +import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import lombok.Data; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; @@ -32,14 +33,9 @@ import static com.tencent.supersonic.common.pojo.Constants.MONTH_FORMAT; @Data public class DateModeUtils { - @Value("${s2.query.parameter.sys.date:sys_imp_date}") - private String sysDateCol; - - @Value("${s2.query.parameter.sys.month:sys_imp_month}") - private String sysDateMonthCol; - - @Value("${s2.query.parameter.sys.month:sys_imp_week}") - private String sysDateWeekCol; + private final String sysDateCol = TimeDimensionEnum.DAY.getName(); + private final String sysDateMonthCol = TimeDimensionEnum.MONTH.getName(); + private final String sysDateWeekCol = TimeDimensionEnum.WEEK.getName(); @Value("${s2.query.parameter.sys.zipper.begin:start_}") private String sysZipperDateColBegin; diff --git a/common/src/test/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelperTest.java b/common/src/test/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelperTest.java index a641b2539..084978319 100644 --- a/common/src/test/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelperTest.java +++ b/common/src/test/java/com/tencent/supersonic/common/jsqlparser/SqlReplaceHelperTest.java @@ -325,10 +325,10 @@ class SqlReplaceHelperTest { } @Test - void testReplaceAggAliasOrderItem() { + void testReplaceAggAliasOrderbyField() { String sql = "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 " + "GROUP BY 部门 ORDER BY SUM(访问次数) DESC LIMIT 10) AS top10"; - String replaceSql = SqlReplaceHelper.replaceAggAliasOrderItem(sql); + String replaceSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql); Assert.assertEquals( "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 " + "GROUP BY 部门 ORDER BY 2 DESC LIMIT 10) AS top10", diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ParseSqlReq.java b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ParseSqlReq.java deleted file mode 100644 index ec4519030..000000000 --- a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/ParseSqlReq.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.tencent.supersonic.headless.api.pojo.request; - -import com.tencent.supersonic.headless.api.pojo.MetricTable; -import lombok.Data; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -@Data -public class ParseSqlReq { - private Map variables; - private String sql = ""; - private List tables; - private boolean supportWith = true; - private boolean withAlias = true; - - public Map getVariables() { - if (variables == null) { - variables = new HashMap<>(); - } - return variables; - } -} diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/QueryFilter.java b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/QueryFilter.java index b06814aef..446377a98 100644 --- a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/QueryFilter.java +++ 
b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/request/QueryFilter.java @@ -34,12 +34,11 @@ public class QueryFilter implements Serializable { QueryFilter that = (QueryFilter) o; return Objects.equal(bizName, that.bizName) && Objects.equal(name, that.name) && operator == that.operator && Objects.equal(value, that.value) - && Objects.equal(elementID, that.elementID) && Objects.equal(function, that.function); } @Override public int hashCode() { - return Objects.hashCode(bizName, name, operator, value, elementID, function); + return Objects.hashCode(bizName, name, operator, value, function); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/DataSetQueryParam.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/DataSetQueryParam.java deleted file mode 100644 index e6f613651..000000000 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/DataSetQueryParam.java +++ /dev/null @@ -1,14 +0,0 @@ -package com.tencent.supersonic.headless.core.pojo; - -import com.tencent.supersonic.headless.api.pojo.MetricTable; -import lombok.Data; - -import java.util.List; - -@Data -public class DataSetQueryParam { - private String sql = ""; - private List tables; - private boolean supportWith = true; - private boolean withAlias = true; -} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/MetricQueryParam.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/MetricQueryParam.java deleted file mode 100644 index 2a3bc8b8b..000000000 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/MetricQueryParam.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.tencent.supersonic.headless.core.pojo; - -import com.tencent.supersonic.common.pojo.ColumnOrder; -import lombok.Data; - -import java.util.List; - -@Data -public class MetricQueryParam { - - private List metrics; - private List dimensions; - private String where; - private Long limit; - private List order; - private boolean nativeQuery = false; -} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java index 33266cc7a..2a6ffd63e 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java @@ -1,34 +1,25 @@ package com.tencent.supersonic.headless.core.pojo; -import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import lombok.Data; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Triple; -import java.util.List; - @Data public class QueryStatement { private Long dataSetId; - private List modelIds; private String sql; private String errMsg; - private QueryParam queryParam; - private MetricQueryParam metricQueryParam; - private DataSetQueryParam dataSetQueryParam; + private StructQueryParam structQueryParam; + private SqlQueryParam sqlQueryParam; + private OntologyQueryParam ontologyQueryParam; private Integer status = 0; private Boolean isS2SQL = false; - private List> timeRanges; private 
Boolean enableOptimize = true; private Triple minMaxTime; - private String dataSetSql; - private String dataSetAlias; - private String dataSetSimplifySql; - private Boolean enableLimitWrapper = false; private Ontology ontology; private SemanticSchemaResp semanticSchemaResp; private Integer limit = 1000; @@ -41,9 +32,4 @@ public class QueryStatement { public boolean isTranslated() { return isTranslated != null && isTranslated && isOk(); } - - public QueryStatement error(String msg) { - this.setErrMsg(msg); - return this; - } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/SqlQueryParam.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/SqlQueryParam.java new file mode 100644 index 000000000..086397e88 --- /dev/null +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/SqlQueryParam.java @@ -0,0 +1,12 @@ +package com.tencent.supersonic.headless.core.pojo; + +import lombok.Data; + +@Data +public class SqlQueryParam { + private String sql; + private String table; + private boolean supportWith = true; + private boolean withAlias = true; + private String simplifiedSql; +} diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/QueryParam.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/StructQueryParam.java similarity index 57% rename from headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/QueryParam.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/StructQueryParam.java index 401d18204..8fd129885 100644 --- a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/QueryParam.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/StructQueryParam.java @@ -1,21 +1,18 @@ -package com.tencent.supersonic.headless.api.pojo; +package com.tencent.supersonic.headless.core.pojo; import com.tencent.supersonic.common.pojo.Aggregator; -import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.Filter; import com.tencent.supersonic.common.pojo.Order; import com.tencent.supersonic.common.pojo.enums.QueryType; +import com.tencent.supersonic.headless.api.pojo.Param; import lombok.Data; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; -import java.util.Set; @Data -public class QueryParam { - // struct +public class StructQueryParam { private List groups = new ArrayList(); private List aggregators = new ArrayList(); private List orders = new ArrayList(); @@ -24,17 +21,5 @@ public class QueryParam { private DateConf dateInfo; private Long limit = 2000L; private QueryType queryType; - private String s2SQL; - private String correctS2SQL; - private Long dataSetId; - private String dataSetName; - private Set modelIds = new HashSet<>(); private List params = new ArrayList<>(); - - // metric - private List metrics = new ArrayList(); - private List dimensions; - private String where; - private List order; - private boolean nativeQuery = false; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java index a1f7a5f48..79bfb478c 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java +++ 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java @@ -1,507 +1,97 @@ package com.tencent.supersonic.headless.core.translator; import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; -import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper; -import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper; -import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; -import com.tencent.supersonic.common.pojo.Aggregator; -import com.tencent.supersonic.common.pojo.Constants; -import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.common.pojo.enums.QueryType; -import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; -import com.tencent.supersonic.common.util.StringUtil; -import com.tencent.supersonic.headless.api.pojo.Measure; -import com.tencent.supersonic.headless.api.pojo.MetricTable; -import com.tencent.supersonic.headless.api.pojo.QueryParam; -import com.tencent.supersonic.headless.api.pojo.SchemaItem; -import com.tencent.supersonic.headless.api.pojo.enums.AggOption; -import com.tencent.supersonic.headless.api.pojo.enums.MetricType; -import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; -import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; -import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp; -import com.tencent.supersonic.headless.api.pojo.response.MetricResp; -import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; -import com.tencent.supersonic.headless.api.pojo.response.ModelResp; -import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; -import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor; -import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory; -import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; +import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.converter.QueryConverter; import com.tencent.supersonic.headless.core.utils.ComponentFactory; -import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; -import org.springframework.beans.BeanUtils; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import org.springframework.util.CollectionUtils; import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.Stream; @Component @Slf4j public class DefaultSemanticTranslator implements SemanticTranslator { - @Autowired - private SqlGenerateUtils sqlGenerateUtils; - public void translate(QueryStatement queryStatement) { if (queryStatement.isTranslated()) { return; } - try { - preprocess(queryStatement); - parse(queryStatement); - 
optimize(queryStatement); + for (QueryConverter converter : ComponentFactory.getQueryConverters()) { + if (converter.accept(queryStatement)) { + log.debug("QueryConverter accept [{}]", converter.getClass().getName()); + converter.convert(queryStatement); + } + } + doOntologyParse(queryStatement); + + if (StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSimplifiedSql())) { + queryStatement.setSql(queryStatement.getSqlQueryParam().getSimplifiedSql()); + } + if (StringUtils.isBlank(queryStatement.getSql())) { + throw new RuntimeException("parse exception: " + queryStatement.getErrMsg()); + } + if (!SqlSelectHelper.hasLimit(queryStatement.getSql())) { + queryStatement + .setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit()); + } + + for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) { + queryOptimizer.rewrite(queryStatement); + } } catch (Exception e) { queryStatement.setErrMsg(e.getMessage()); + log.error("Failed to translate semantic query [{}]", e); } } - private void parse(QueryStatement queryStatement) throws Exception { - QueryParam queryParam = queryStatement.getQueryParam(); - if (Objects.isNull(queryStatement.getDataSetQueryParam())) { - queryStatement.setDataSetQueryParam(new DataSetQueryParam()); - } - if (Objects.isNull(queryStatement.getMetricQueryParam())) { - queryStatement.setMetricQueryParam(new MetricQueryParam()); + private void doOntologyParse(QueryStatement queryStatement) throws Exception { + OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam(); + SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); + log.info("parse with ontology: [{}]", ontologyQueryParam); + ComponentFactory.getQueryParser().parse(queryStatement); + + if (!queryStatement.isOk()) { + throw new Exception(String.format("parse table [%s] error [%s]", + sqlQueryParam.getTable(), queryStatement.getErrMsg())); } - log.debug("SemanticConverter before [{}]", queryParam); - for (QueryConverter headlessConverter : ComponentFactory.getQueryConverters()) { - if (headlessConverter.accept(queryStatement)) { - log.debug("SemanticConverter accept [{}]", headlessConverter.getClass().getName()); - headlessConverter.convert(queryStatement); + List> tables = new ArrayList<>(); + tables.add(Pair.of(sqlQueryParam.getTable(), queryStatement.getSql())); + if (sqlQueryParam.isSupportWith()) { + EngineType engineType = + EngineType.fromString(queryStatement.getOntology().getDatabase().getType()); + if (!SqlMergeWithUtils.hasWith(engineType, sqlQueryParam.getSql())) { + String withSql = "with " + tables.stream() + .map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight())) + .collect(Collectors.joining(",")) + "\n" + sqlQueryParam.getSql(); + queryStatement.setSql(withSql); + } else { + List parentTableList = + tables.stream().map(Pair::getLeft).collect(Collectors.toList()); + List parentSqlList = + tables.stream().map(Pair::getRight).collect(Collectors.toList()); + String mergeSql = SqlMergeWithUtils.mergeWith(engineType, sqlQueryParam.getSql(), + parentSqlList, parentTableList); + queryStatement.setSql(mergeSql); } - } - log.debug("SemanticConverter after {} {} {}", queryParam, - queryStatement.getDataSetQueryParam(), queryStatement.getMetricQueryParam()); - - if (!queryStatement.getDataSetQueryParam().getSql().isEmpty()) { - doParse(queryStatement.getDataSetQueryParam(), queryStatement); } else { - queryStatement.getMetricQueryParam() - .setNativeQuery(queryParam.getQueryType().isNativeAggQuery()); - doParse(queryStatement, - 
AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery())); - } - - if (StringUtils.isEmpty(queryStatement.getSql())) { - throw new RuntimeException("parse Exception: " + queryStatement.getErrMsg()); - } - if (StringUtils.isNotBlank(queryStatement.getSql()) - && !SqlSelectHelper.hasLimit(queryStatement.getSql())) { - String querySql = - queryStatement.getSql() + " limit " + queryStatement.getLimit().toString(); - queryStatement.setSql(querySql); - } - } - - private QueryStatement doParse(DataSetQueryParam dataSetQueryParam, - QueryStatement queryStatement) { - log.info("parse dataSetQuery [{}] ", dataSetQueryParam); - Ontology ontology = queryStatement.getOntology(); - EngineType engineType = EngineType.fromString(ontology.getDatabase().getType()); - try { - if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) { - List tables = new ArrayList<>(); - boolean isSingleTable = dataSetQueryParam.getTables().size() == 1; - for (MetricTable metricTable : dataSetQueryParam.getTables()) { - QueryStatement tableSql = parserSql(metricTable, isSingleTable, - dataSetQueryParam, queryStatement); - if (isSingleTable && StringUtils.isNotBlank(tableSql.getDataSetSimplifySql())) { - queryStatement.setSql(tableSql.getDataSetSimplifySql()); - queryStatement.setDataSetQueryParam(dataSetQueryParam); - return queryStatement; - } - tables.add(new String[] {metricTable.getAlias(), tableSql.getSql()}); - } - if (!tables.isEmpty()) { - String sql; - if (dataSetQueryParam.isSupportWith()) { - if (!SqlMergeWithUtils.hasWith(engineType, dataSetQueryParam.getSql())) { - sql = "with " - + tables.stream() - .map(t -> String.format("%s as (%s)", t[0], t[1])) - .collect(Collectors.joining(",")) - + "\n" + dataSetQueryParam.getSql(); - } else { - List parentWithNameList = tables.stream().map(table -> table[0]) - .collect(Collectors.toList()); - List parentSqlList = tables.stream().map(table -> table[1]) - .collect(Collectors.toList()); - sql = SqlMergeWithUtils.mergeWith(engineType, - dataSetQueryParam.getSql(), parentSqlList, parentWithNameList); - } - } else { - sql = dataSetQueryParam.getSql(); - for (String[] tb : tables) { - sql = StringUtils.replace(sql, tb[0], "(" + tb[1] + ") " - + (dataSetQueryParam.isWithAlias() ? "" : tb[0]), -1); - } - } - queryStatement.setSql(sql); - queryStatement.setDataSetQueryParam(dataSetQueryParam); - return queryStatement; - } + String dsSql = sqlQueryParam.getSql(); + for (Pair tb : tables) { + dsSql = StringUtils.replace(dsSql, tb.getLeft(), "(" + tb.getRight() + ") " + + (sqlQueryParam.isWithAlias() ? 
"" : tb.getLeft()), -1); } - } catch (Exception e) { - log.error("physicalSql error {}", e); - queryStatement.setErrMsg(e.getMessage()); - } - return queryStatement; - } - - private QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) { - MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam(); - log.info("parse metricQuery [{}] isAgg [{}]", metricQueryParam, isAgg); - try { - ComponentFactory.getQueryParser().parse(queryStatement, isAgg); - } catch (Exception e) { - queryStatement.setErrMsg(e.getMessage()); - log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e); - } - return queryStatement; - } - - private QueryStatement parserSql(MetricTable metricTable, Boolean isSingleMetricTable, - DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) throws Exception { - MetricQueryParam metricQueryParam = new MetricQueryParam(); - metricQueryParam.setMetrics(metricTable.getMetrics()); - metricQueryParam.setDimensions(metricTable.getDimensions()); - metricQueryParam.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere())); - metricQueryParam.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption())); - - QueryStatement tableSql = new QueryStatement(); - tableSql.setIsS2SQL(false); - tableSql.setMetricQueryParam(metricQueryParam); - tableSql.setMinMaxTime(queryStatement.getMinMaxTime()); - tableSql.setEnableOptimize(queryStatement.getEnableOptimize()); - tableSql.setDataSetId(queryStatement.getDataSetId()); - tableSql.setOntology(queryStatement.getOntology()); - if (isSingleMetricTable) { - tableSql.setDataSetSql(dataSetQueryParam.getSql()); - tableSql.setDataSetAlias(metricTable.getAlias()); - } - tableSql = doParse(tableSql, metricTable.getAggOption()); - if (!tableSql.isOk()) { - throw new Exception(String.format("parser table [%s] error [%s]", - metricTable.getAlias(), tableSql.getErrMsg())); - } - return tableSql; - } - - private void optimize(QueryStatement queryStatement) { - for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) { - queryOptimizer.rewrite(queryStatement); + queryStatement.setSql(dsSql); } } - private void preprocess(QueryStatement queryStatement) { - if (StringUtils.isBlank(queryStatement.getSql())) { - return; - } - SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); - - convertNameToBizName(queryStatement); - rewriteFunction(queryStatement); - queryStatement.setSql(SqlRemoveHelper.removeUnderscores(queryStatement.getSql())); - - String tableName = SqlSelectHelper.getTableName(queryStatement.getSql()); - if (StringUtils.isEmpty(tableName)) { - return; - } - // correct order item is same as agg alias - String reqSql = queryStatement.getSql(); - queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(queryStatement.getSql())); - log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql()); - // 5.build MetricTables - List allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql()); - List metricSchemas = getMetrics(semanticSchemaResp, allFields); - List metrics = - metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList()); - Set dimensions = getDimensions(semanticSchemaResp, allFields); - QueryStructReq queryStructReq = new QueryStructReq(); - - MetricTable metricTable = new MetricTable(); - metricTable.getMetrics().addAll(metrics); - metricTable.getDimensions().addAll(dimensions); - metricTable.setAlias(tableName.toLowerCase()); - // if metric empty , fill model default - if 
(CollectionUtils.isEmpty(metricTable.getMetrics())) { - metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( - getDefaultModel(semanticSchemaResp, metricTable.getDimensions()))); - } else { - queryStructReq.getAggregators() - .addAll(metricTable.getMetrics().stream() - .map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN)) - .collect(Collectors.toList())); - } - AggOption aggOption = getAggOption(queryStatement, metricSchemas); - metricTable.setAggOption(aggOption); - List tables = new ArrayList<>(); - tables.add(metricTable); - - // 6.build ParseSqlReq - DataSetQueryParam datasetQueryParam = new DataSetQueryParam(); - datasetQueryParam.setTables(tables); - datasetQueryParam.setSql(queryStatement.getSql()); - DatabaseResp database = semanticSchemaResp.getDatabaseResp(); - if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()), - database.getVersion())) { - datasetQueryParam.setSupportWith(false); - datasetQueryParam.setWithAlias(false); - } - - // 7. do deriveMetric - generateDerivedMetric(semanticSchemaResp, aggOption, datasetQueryParam); - - // 8.physicalSql by ParseSqlReq - // queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(queryStatement.getSql())); - queryStructReq.setDataSetId(queryStatement.getDataSetId()); - queryStructReq.setQueryType(getQueryType(aggOption)); - log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq); - QueryParam queryParam = new QueryParam(); - BeanUtils.copyProperties(queryStructReq, queryParam); - queryStatement.setQueryParam(queryParam); - queryStatement.setDataSetQueryParam(datasetQueryParam); - // queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq)); - } - - private AggOption getAggOption(QueryStatement queryStatement, - List metricSchemas) { - String sql = queryStatement.getSql(); - if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql) - && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) { - log.debug("getAggOption simple sql set to DEFAULT"); - return AggOption.DEFAULT; - } - // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE" - // if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE" - if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) - || SqlSelectFunctionHelper.hasFunction(sql, "count") - || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) { - return AggOption.OUTER; - } - // if (queryStatement.isInnerLayerNative()) { - // return AggOption.NATIVE; - // } - if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql) - || SqlSelectHelper.hasGroupBy(sql)) { - return AggOption.OUTER; - } - long defaultAggNullCnt = metricSchemas.stream().filter( - m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg())) - .count(); - if (defaultAggNullCnt > 0) { - log.debug("getAggOption find null defaultAgg metric set to NATIVE"); - return AggOption.OUTER; - } - return AggOption.DEFAULT; - } - - private void convertNameToBizName(QueryStatement queryStatement) { - SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); - Map fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp); - String sql = queryStatement.getSql(); - log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(), - sql); - sql = SqlReplaceHelper.replaceSqlByPositions(sql); - log.debug("replaceSqlByPositions:{}", sql); - sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true); 
- log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(), - sql); - sql = SqlReplaceHelper.replaceTable(sql, - Constants.TABLE_PREFIX + queryStatement.getDataSetId()); - log.debug("replaceTableName after:{}", sql); - queryStatement.setSql(sql); - } - - private Set getDimensions(SemanticSchemaResp semanticSchemaResp, - List allFields) { - Map dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream() - .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(), - SchemaItem::getBizName, (k1, k2) -> k1)); - dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(), - TimeDimensionEnum.DAY.getName()); - return allFields.stream() - .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase())) - .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase())) - .collect(Collectors.toSet()); - } - - private List getMetrics(SemanticSchemaResp semanticSchemaResp, - List allFields) { - Map metricLowerToNameMap = - semanticSchemaResp.getMetrics().stream().collect(Collectors - .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry)); - return allFields.stream() - .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase())) - .map(entry -> metricLowerToNameMap.get(entry.toLowerCase())) - .collect(Collectors.toList()); - } - - private void rewriteFunction(QueryStatement queryStatement) { - SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); - DatabaseResp database = semanticSchemaResp.getDatabaseResp(); - if (Objects.isNull(database) || Objects.isNull(database.getType())) { - return; - } - String type = database.getType(); - DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase()); - if (Objects.nonNull(engineAdaptor)) { - String functionNameCorrector = - engineAdaptor.functionNameCorrector(queryStatement.getSql()); - queryStatement.setSql(functionNameCorrector); - } - } - - protected Map getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) { - // support fieldName and field alias to bizName - Map dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap( - entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) - .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); - - Map metricResults = semanticSchemaResp.getMetrics().stream().flatMap( - entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) - .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); - - dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap()); - dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap()); - dimensionResults.putAll(metricResults); - return dimensionResults; - } - - private Stream> getPairStream(String aliasStr, String name, - String bizName) { - Set> elements = new HashSet<>(); - elements.add(Pair.of(name, bizName)); - if (StringUtils.isNotBlank(aliasStr)) { - List aliasList = SchemaItem.getAliasList(aliasStr); - for (String alias : aliasList) { - elements.add(Pair.of(alias, bizName)); - } - } - return elements.stream(); - } - - private QueryType getQueryType(AggOption aggOption) { - boolean isAgg = AggOption.isAgg(aggOption); - QueryType queryType = QueryType.DETAIL; - if (isAgg) { - queryType = QueryType.AGGREGATE; - } - return queryType; - } - - private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption, - DataSetQueryParam viewQueryParam) { - String sql = viewQueryParam.getSql(); - for (MetricTable metricTable : 
viewQueryParam.getTables()) { - Set measures = new HashSet<>(); - Map replaces = generateDerivedMetric(semanticSchemaResp, aggOption, - metricTable.getMetrics(), metricTable.getDimensions(), measures); - - if (!CollectionUtils.isEmpty(replaces)) { - // metricTable sql use measures replace metric - sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces); - metricTable.setAggOption(AggOption.NATIVE); - // metricTable use measures replace metric - if (!CollectionUtils.isEmpty(measures)) { - metricTable.setMetrics(new ArrayList<>(measures)); - } else { - // empty measure , fill default - metricTable.setMetrics(new ArrayList<>()); - metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( - getDefaultModel(semanticSchemaResp, metricTable.getDimensions()))); - } - } - } - viewQueryParam.setSql(sql); - } - - private Map generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, - AggOption aggOption, List metrics, List dimensions, - Set measures) { - Map result = new HashMap<>(); - List metricResps = semanticSchemaResp.getMetrics(); - List dimensionResps = semanticSchemaResp.getDimensions(); - - // Check if any metric is derived - boolean hasDerivedMetrics = - metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType - .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams())); - if (!hasDerivedMetrics) { - return result; - } - - log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics); - - Set allFields = new HashSet<>(); - Map allMeasures = new HashMap<>(); - semanticSchemaResp.getModelResps().forEach(modelResp -> { - allFields.addAll(modelResp.getFieldList()); - if (modelResp.getModelDetail().getMeasures() != null) { - modelResp.getModelDetail().getMeasures() - .forEach(measure -> allMeasures.put(measure.getBizName(), measure)); - } - }); - - Set derivedDimensions = new HashSet<>(); - Set derivedMetrics = new HashSet<>(); - Map visitedMetrics = new HashMap<>(); - - for (MetricResp metricResp : metricResps) { - if (metrics.contains(metricResp.getBizName())) { - boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(), - metricResp.getMetricDefineByMeasureParams()); - if (isDerived) { - String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields, - allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp), - metricResp.getMetricDefineType(), aggOption, visitedMetrics, - derivedMetrics, derivedDimensions); - result.put(metricResp.getBizName(), expr); - log.debug("derived metric {}->{}", metricResp.getBizName(), expr); - } else { - measures.add(metricResp.getBizName()); - } - } - } - - measures.addAll(derivedMetrics); - derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension)) - .forEach(dimensions::add); - - return result; - } - - private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List dimensions) { - if (!CollectionUtils.isEmpty(dimensions)) { - Map modelMatchCnt = new HashMap<>(); - for (ModelResp modelResp : semanticSchemaResp.getModelResps()) { - modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions() - .stream().filter(d -> dimensions.contains(d.getBizName())).count()); - } - return modelMatchCnt.entrySet().stream() - .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) - .map(m -> m.getKey()).findFirst().orElse(""); - } - return semanticSchemaResp.getModelResps().get(0).getBizName(); - } } diff --git 
a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DetailQueryOptimizer.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DetailQueryOptimizer.java index bf84cbb2b..3a30a91f9 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DetailQueryOptimizer.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DetailQueryOptimizer.java @@ -1,14 +1,12 @@ package com.tencent.supersonic.headless.core.translator; -import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; +import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.stereotype.Component; -import org.springframework.util.CollectionUtils; import java.util.Objects; -import java.util.stream.Collectors; /** Remove the default metric added by the system when the query only has dimensions */ @Slf4j @@ -17,26 +15,26 @@ public class DetailQueryOptimizer implements QueryOptimizer { @Override public void rewrite(QueryStatement queryStatement) { - QueryParam queryParam = queryStatement.getQueryParam(); + StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); String sqlRaw = queryStatement.getSql().trim(); if (StringUtils.isEmpty(sqlRaw)) { throw new RuntimeException("sql is empty or null"); } log.debug("before handleNoMetric, sql:{}", sqlRaw); - if (isDetailQuery(queryParam)) { - if (queryParam.getMetrics().size() == 0 - && !CollectionUtils.isEmpty(queryParam.getGroups())) { - String sqlForm = "select %s from ( %s ) src_no_metric"; - String sql = String.format(sqlForm, - queryParam.getGroups().stream().collect(Collectors.joining(",")), sqlRaw); - queryStatement.setSql(sql); - } - } + // if (isDetailQuery(structQueryParam)) { + // if (!CollectionUtils.isEmpty(structQueryParam.getGroups())) { + // String sqlForm = "select %s from ( %s ) src_no_metric"; + // String sql = String.format(sqlForm, + // structQueryParam.getGroups().stream().collect(Collectors.joining(",")), + // sqlRaw); + // queryStatement.setSql(sql); + // } + // } log.debug("after handleNoMetric, sql:{}", queryStatement.getSql()); } - public boolean isDetailQuery(QueryParam queryParam) { - return Objects.nonNull(queryParam) && queryParam.getQueryType().isNativeAggQuery() - && CollectionUtils.isEmpty(queryParam.getMetrics()); + public boolean isDetailQuery(StructQueryParam structQueryParam) { + return Objects.nonNull(structQueryParam) + && structQueryParam.getQueryType().isNativeAggQuery(); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/QueryParser.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/QueryParser.java index b15212c65..344335af0 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/QueryParser.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/QueryParser.java @@ -1,9 +1,8 @@ package com.tencent.supersonic.headless.core.translator; -import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.core.pojo.QueryStatement; /** A query parser generates physical SQL for the QueryStatement. 
*/ public interface QueryParser { - void parse(QueryStatement queryStatement, AggOption aggOption) throws Exception; + void parse(QueryStatement queryStatement) throws Exception; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java index ce0d27a11..8a787b92c 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java @@ -1,6 +1,5 @@ package com.tencent.supersonic.headless.core.translator.calcite; -import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.QueryParser; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; @@ -16,7 +15,7 @@ import org.springframework.stereotype.Component; public class CalciteQueryParser implements QueryParser { @Override - public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception { + public void parse(QueryStatement queryStatement) throws Exception { Ontology ontology = queryStatement.getOntology(); if (ontology == null) { queryStatement.setErrMsg("No ontology could be found"); @@ -29,7 +28,7 @@ public class CalciteQueryParser implements QueryParser { .enableOptimize(queryStatement.getEnableOptimize()).build()) .build(); SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema); - sqlBuilder.build(queryStatement, isAgg); + sqlBuilder.build(queryStatement); } } diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/MetricTable.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java similarity index 57% rename from headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/MetricTable.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java index 9740891de..b399eb486 100644 --- a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/MetricTable.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java @@ -1,17 +1,19 @@ -package com.tencent.supersonic.headless.api.pojo; +package com.tencent.supersonic.headless.core.translator.calcite.s2sql; import com.google.common.collect.Lists; +import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import lombok.Data; import java.util.List; @Data -public class MetricTable { - - private String alias; +public class OntologyQueryParam { private List metrics = Lists.newArrayList(); private List dimensions = Lists.newArrayList(); private String where; + private Long limit; + private List order; + private boolean nativeQuery = false; private AggOption aggOption = AggOption.DEFAULT; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java index 971886ed4..6477245f9 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java +++ 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java @@ -1,14 +1,13 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql; import com.tencent.supersonic.common.calcite.Configuration; -import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.core.pojo.Database; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender; @@ -17,23 +16,16 @@ import com.tencent.supersonic.headless.core.translator.calcite.sql.render.Render import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.validate.SqlValidatorScope; -import java.util.ArrayList; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.ListIterator; -import java.util.Objects; +import java.util.*; -/** parsing from query dimensions and metrics */ @Slf4j public class SqlBuilder { private final S2CalciteSchema schema; - private MetricQueryParam metricQueryParam; + private OntologyQueryParam ontologyQueryParam; private SqlValidatorScope scope; private SqlNode parserNode; private boolean isAgg = false; @@ -43,45 +35,32 @@ public class SqlBuilder { this.schema = schema; } - public void build(QueryStatement queryStatement, AggOption aggOption) throws Exception { - this.metricQueryParam = queryStatement.getMetricQueryParam(); - if (metricQueryParam.getMetrics() == null) { - metricQueryParam.setMetrics(new ArrayList<>()); + public void build(QueryStatement queryStatement) throws Exception { + this.ontologyQueryParam = queryStatement.getOntologyQueryParam(); + if (ontologyQueryParam.getMetrics() == null) { + ontologyQueryParam.setMetrics(new ArrayList<>()); } - if (metricQueryParam.getDimensions() == null) { - metricQueryParam.setDimensions(new ArrayList<>()); + if (ontologyQueryParam.getDimensions() == null) { + ontologyQueryParam.setDimensions(new ArrayList<>()); } - if (metricQueryParam.getLimit() == null) { - metricQueryParam.setLimit(0L); + if (ontologyQueryParam.getLimit() == null) { + ontologyQueryParam.setLimit(0L); } - this.aggOption = aggOption; + this.aggOption = ontologyQueryParam.getAggOption(); buildParseNode(); Database database = queryStatement.getOntology().getDatabase(); EngineType engineType = EngineType.fromString(database.getType()); optimizeParseNode(engineType); String sql = getSql(engineType); - queryStatement.setSql(sql); - if (Objects.nonNull(queryStatement.getEnableOptimize()) - && queryStatement.getEnableOptimize() - && Objects.nonNull(queryStatement.getDataSetAlias()) - && !queryStatement.getDataSetAlias().isEmpty()) { - // simplify 
model sql with query sql - String simplifySql = rewrite(getSqlByDataSet(engineType, sql, - queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()), engineType); - if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) { - log.debug("simplifySql [{}]", simplifySql); - queryStatement.setDataSetSimplifySql(simplifySql); - } - } } private void buildParseNode() throws Exception { // find the match Datasource scope = SchemaBuilder.getScope(schema); List dataModels = - DataModelNode.getRelatedDataModels(scope, schema, metricQueryParam); + DataModelNode.getRelatedDataModels(scope, schema, ontologyQueryParam); if (dataModels == null || dataModels.isEmpty()) { throw new Exception("data model not found"); } @@ -98,14 +77,14 @@ public class SqlBuilder { while (it.hasNext()) { Renderer renderer = it.next(); if (previous != null) { - previous.render(metricQueryParam, dataModels, scope, schema, !isAgg); + previous.render(ontologyQueryParam, dataModels, scope, schema, !isAgg); renderer.setTable(previous .builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i))); i++; } previous = renderer; } - builders.getLast().render(metricQueryParam, dataModels, scope, schema, !isAgg); + builders.getLast().render(ontologyQueryParam, dataModels, scope, schema, !isAgg); parserNode = builders.getLast().builder(); } @@ -116,7 +95,7 @@ public class SqlBuilder { // default by dataModel time aggregation if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime() .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) { - if (!metricQueryParam.isNativeQuery()) { + if (!ontologyQueryParam.isNativeQuery()) { return true; } } @@ -164,13 +143,4 @@ public class SqlBuilder { } } - private String getSqlByDataSet(EngineType engineType, String parentSql, String dataSetSql, - String parentAlias) throws SqlParseException { - if (!SqlMergeWithUtils.hasWith(engineType, dataSetSql)) { - return String.format("with %s as (%s) %s", parentAlias, parentSql, dataSetSql); - } - return SqlMergeWithUtils.mergeWith(engineType, dataSetSql, - Collections.singletonList(parentSql), Collections.singletonList(parentAlias)); - } - } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java index 73c5c422e..a648b4122 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java @@ -4,13 +4,13 @@ import com.google.common.collect.Lists; import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import 
com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder; import lombok.extern.slf4j.Slf4j; @@ -150,7 +150,7 @@ public class DataModelNode extends SemanticNode { } public static void getQueryDimensionMeasure(S2CalciteSchema schema, - MetricQueryParam metricCommand, Set queryDimension, List measures) { + OntologyQueryParam metricCommand, Set queryDimension, List measures) { queryDimension.addAll(metricCommand.getDimensions().stream() .map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] @@ -166,7 +166,7 @@ public class DataModelNode extends SemanticNode { } public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema, - MetricQueryParam metricCommand, Set queryDimension, List measures, + OntologyQueryParam metricCommand, Set queryDimension, List measures, SqlValidatorScope scope) throws Exception { EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { @@ -192,7 +192,7 @@ public class DataModelNode extends SemanticNode { } public static List getRelatedDataModels(SqlValidatorScope scope, - S2CalciteSchema schema, MetricQueryParam metricCommand) throws Exception { + S2CalciteSchema schema, OntologyQueryParam metricCommand) throws Exception { List dataModels = new ArrayList<>(); // check by metric @@ -208,7 +208,7 @@ public class DataModelNode extends SemanticNode { sourceMeasure.retainAll(measures); dataSourceMeasures.put(entry.getKey(), sourceMeasure.size()); } - log.info("dataSourceMeasures [{}]", dataSourceMeasures); + log.info("metrics: [{}]", dataSourceMeasures); Optional> base = dataSourceMeasures.entrySet().stream() .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); if (base.isPresent()) { @@ -267,7 +267,7 @@ public class DataModelNode extends SemanticNode { } private static boolean checkMatch(Set sourceMeasure, Set queryDimension, - List measures, Set dimension, MetricQueryParam metricCommand, + List measures, Set dimension, OntologyQueryParam metricCommand, SqlValidatorScope scope, EngineType engineType) throws Exception { boolean isAllMatch = true; sourceMeasure.retainAll(measures); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java index fd058739a..6ad65aa1b 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java @@ -1,10 +1,10 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; 
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; @@ -26,7 +26,7 @@ import java.util.stream.Collectors; public class FilterRender extends Renderer { @Override - public void render(MetricQueryParam metricCommand, List dataModels, + public void render(OntologyQueryParam metricCommand, List dataModels, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { TableView tableView = super.tableView; SqlNode filterNode = null; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java index a74b85934..5d04eef8f 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java @@ -1,7 +1,6 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; @@ -9,6 +8,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode; @@ -47,7 +47,7 @@ import java.util.stream.Collectors; public class JoinRender extends Renderer { @Override - public void render(MetricQueryParam metricCommand, List dataModels, + public void render(OntologyQueryParam metricCommand, List dataModels, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { String queryWhere = metricCommand.getWhere(); EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java index ef20426f9..9a516cefc 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java @@ -2,8 +2,8 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; +import 
com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; @@ -22,7 +22,7 @@ import java.util.List; public class OutputRender extends Renderer { @Override - public void render(MetricQueryParam metricCommand, List dataModels, + public void render(OntologyQueryParam metricCommand, List dataModels, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { TableView selectDataSet = super.tableView; EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/Renderer.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/Renderer.java index d4322a411..c3466b540 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/Renderer.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/Renderer.java @@ -1,12 +1,12 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode; @@ -114,6 +114,6 @@ public abstract class Renderer { return SemanticNode.buildAs(alias, tableView.build()); } - public abstract void render(MetricQueryParam metricCommand, List dataModels, + public abstract void render(OntologyQueryParam metricCommand, List dataModels, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception; } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java index c4eb0d24c..19fea0587 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java @@ -1,7 +1,6 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import 
com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; @@ -9,6 +8,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode; @@ -336,9 +336,9 @@ public class SourceRender extends Renderer { } } - public void render(MetricQueryParam metricQueryParam, List dataModels, + public void render(OntologyQueryParam ontologyQueryParam, List dataModels, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { - String queryWhere = metricQueryParam.getWhere(); + String queryWhere = ontologyQueryParam.getWhere(); Set whereFields = new HashSet<>(); List fieldWhere = new ArrayList<>(); EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); @@ -349,13 +349,13 @@ public class SourceRender extends Renderer { } if (dataModels.size() == 1) { DataModel dataModel = dataModels.get(0); - super.tableView = renderOne("", fieldWhere, metricQueryParam.getMetrics(), - metricQueryParam.getDimensions(), metricQueryParam.getWhere(), dataModel, scope, - schema, nonAgg); + super.tableView = renderOne("", fieldWhere, ontologyQueryParam.getMetrics(), + ontologyQueryParam.getDimensions(), ontologyQueryParam.getWhere(), dataModel, + scope, schema, nonAgg); return; } JoinRender joinRender = new JoinRender(); - joinRender.render(metricQueryParam, dataModels, scope, schema, nonAgg); + joinRender.render(ontologyQueryParam, dataModels, scope, schema, nonAgg); super.tableView = joinRender.getTableView(); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java index 54b24dd84..8d44f7750 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/DefaultDimValueConverter.java @@ -4,7 +4,6 @@ import com.google.common.collect.Lists; import com.tencent.supersonic.common.jsqlparser.SqlAddHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; -import com.tencent.supersonic.headless.api.pojo.MetricTable; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import lombok.extern.slf4j.Slf4j; @@ -28,8 +27,8 @@ public class DefaultDimValueConverter implements QueryConverter { @Override public boolean accept(QueryStatement queryStatement) { - return !Objects.isNull(queryStatement.getDataSetQueryParam()) - && !StringUtils.isBlank(queryStatement.getDataSetQueryParam().getSql()); + return Objects.nonNull(queryStatement.getSqlQueryParam()) + && StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSql()); } @Override @@ -40,15 +39,13 @@ public 
class DefaultDimValueConverter implements QueryConverter { if (CollectionUtils.isEmpty(dimensions)) { return; } - String sql = queryStatement.getDataSetQueryParam().getSql(); + String sql = queryStatement.getSqlQueryParam().getSql(); List whereFields = SqlSelectHelper.getWhereFields(sql).stream() .filter(field -> !TimeDimensionEnum.containsTimeDimension(field)) .collect(Collectors.toList()); if (!CollectionUtils.isEmpty(whereFields)) { return; } - MetricTable metricTable = - queryStatement.getDataSetQueryParam().getTables().stream().findFirst().orElse(null); List expressions = Lists.newArrayList(); for (Dimension dimension : dimensions) { ExpressionList expressionList = new ExpressionList(); @@ -59,11 +56,11 @@ public class DefaultDimValueConverter implements QueryConverter { inExpression.setLeftExpression(new Column(dimension.getBizName())); inExpression.setRightExpression(expressionList); expressions.add(inExpression); - if (metricTable != null) { - metricTable.getDimensions().add(dimension.getBizName()); + if (Objects.nonNull(queryStatement.getSqlQueryParam().getTable())) { + queryStatement.getOntologyQueryParam().getDimensions().add(dimension.getBizName()); } } sql = SqlAddHelper.addWhere(sql, expressions); - queryStatement.getDataSetQueryParam().setSql(sql); + queryStatement.getSqlQueryParam().setSql(sql); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/CalculateAggConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/MetricRatioConverter.java similarity index 50% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/CalculateAggConverter.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/MetricRatioConverter.java index e0287e427..285ea655b 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/CalculateAggConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/MetricRatioConverter.java @@ -6,82 +6,46 @@ import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum; import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.DateModeUtils; -import com.tencent.supersonic.headless.api.pojo.MetricTable; -import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; -import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam; import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.QueryStatement; +import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; +import com.tencent.supersonic.headless.core.pojo.StructQueryParam; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; import org.springframework.util.CollectionUtils; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; -/** supplement the QueryStatement when query with custom aggregation method */ @Component("CalculateAggConverter") @Slf4j -public class CalculateAggConverter implements QueryConverter { +public class MetricRatioConverter implements 
QueryConverter { public interface EngineSql { - String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql); - } - - public DataSetQueryParam generateSqlCommend(QueryStatement queryStatement, - EngineType engineTypeEnum, String version) throws Exception { - SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); - QueryParam queryParam = queryStatement.getQueryParam(); - // 同环比 - if (isRatioAccept(queryParam)) { - return generateRatioSqlCommand(queryStatement, engineTypeEnum, version); - } - DataSetQueryParam sqlCommand = new DataSetQueryParam(); - String metricTableName = "v_metric_tb_tmp"; - MetricTable metricTable = new MetricTable(); - metricTable.setAlias(metricTableName); - metricTable.setMetrics(queryParam.getMetrics()); - metricTable.setDimensions(queryParam.getGroups()); - String where = sqlGenerateUtils.generateWhere(queryParam, null); - log.info("in generateSqlCommand, complete where:{}", where); - metricTable.setWhere(where); - metricTable.setAggOption(AggOption.AGGREGATION); - sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable))); - String sql = String.format("select %s from %s %s %s %s", - sqlGenerateUtils.getSelect(queryParam), metricTableName, - sqlGenerateUtils.getGroupBy(queryParam), sqlGenerateUtils.getOrderBy(queryParam), - sqlGenerateUtils.getLimit(queryParam)); - if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) { - sqlCommand.setSupportWith(false); - sql = String.format("select %s from %s t0 %s %s %s", - sqlGenerateUtils.getSelect(queryParam), metricTableName, - sqlGenerateUtils.getGroupBy(queryParam), - sqlGenerateUtils.getOrderBy(queryParam), sqlGenerateUtils.getLimit(queryParam)); - } - sqlCommand.setSql(sql); - return sqlCommand; + String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, + String metricSql); } @Override public boolean accept(QueryStatement queryStatement) { - if (Objects.isNull(queryStatement.getQueryParam()) || queryStatement.getIsS2SQL()) { + if (Objects.isNull(queryStatement.getStructQueryParam()) || queryStatement.getIsS2SQL() + || !isRatioAccept(queryStatement.getStructQueryParam())) { return false; } - QueryParam queryParam = queryStatement.getQueryParam(); - if (queryParam.getQueryType().isNativeAggQuery()) { - return false; - } - if (CollectionUtils.isEmpty(queryParam.getAggregators())) { + StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); + if (structQueryParam.getQueryType().isNativeAggQuery() + || CollectionUtils.isEmpty(structQueryParam.getAggregators())) { return false; } int nonSumFunction = 0; - for (Aggregator agg : queryParam.getAggregators()) { + for (Aggregator agg : structQueryParam.getAggregators()) { if (agg.getFunc() == null || "".equals(agg.getFunc())) { return false; } @@ -98,14 +62,13 @@ public class CalculateAggConverter implements QueryConverter { @Override public void convert(QueryStatement queryStatement) throws Exception { Database database = queryStatement.getOntology().getDatabase(); - DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement, - EngineType.fromString(database.getType().toUpperCase()), database.getVersion()); - queryStatement.setDataSetQueryParam(dataSetQueryParam); + generateRatioSql(queryStatement, EngineType.fromString(database.getType().toUpperCase()), + database.getVersion()); } /** Ratio */ - public boolean isRatioAccept(QueryParam queryParam) { - Long ratioFuncNum = queryParam.getAggregators().stream() + public boolean isRatioAccept(StructQueryParam 
structQueryParam) { + Long ratioFuncNum = structQueryParam.getAggregators().stream() .filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL) || f.getFunc().equals(AggOperatorEnum.RATIO_OVER))) .count(); @@ -115,53 +78,47 @@ public class CalculateAggConverter implements QueryConverter { return false; } - public DataSetQueryParam generateRatioSqlCommand(QueryStatement queryStatement, - EngineType engineTypeEnum, String version) throws Exception { + public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum, + String version) throws Exception { SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); - QueryParam queryParam = queryStatement.getQueryParam(); - check(queryParam); + StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); + check(structQueryParam); queryStatement.setEnableOptimize(false); - DataSetQueryParam sqlCommand = new DataSetQueryParam(); + OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam(); + ontologyQueryParam.setAggOption(AggOption.AGGREGATION); String metricTableName = "v_metric_tb_tmp"; - MetricTable metricTable = new MetricTable(); - metricTable.setAlias(metricTableName); - metricTable.setMetrics(queryParam.getMetrics()); - metricTable.setDimensions(queryParam.getGroups()); - String where = sqlGenerateUtils.generateWhere(queryParam, null); - log.info("in generateSqlCommend, complete where:{}", where); - metricTable.setWhere(where); - metricTable.setAggOption(AggOption.AGGREGATION); - sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable))); - boolean isOver = isOverRatio(queryParam); + boolean isOver = isOverRatio(structQueryParam); String sql = ""; + + SqlQueryParam dsParam = queryStatement.getSqlQueryParam(); + dsParam.setTable(metricTableName); switch (engineTypeEnum) { case H2: - sql = new H2EngineSql().sql(queryParam, isOver, true, metricTableName); + sql = new H2EngineSql().sql(structQueryParam, isOver, true, metricTableName); break; case MYSQL: case DORIS: case CLICKHOUSE: if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) { - sqlCommand.setSupportWith(false); + dsParam.setSupportWith(false); } if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) { - sql = new MysqlEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(), - metricTableName); + sql = new MysqlEngineSql().sql(structQueryParam, isOver, + dsParam.isSupportWith(), metricTableName); } else { - sql = new CkEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(), + sql = new CkEngineSql().sql(structQueryParam, isOver, dsParam.isSupportWith(), metricTableName); } break; default: } - sqlCommand.setSql(sql); - return sqlCommand; + dsParam.setSql(sql); } public class H2EngineSql implements EngineSql { - public String getOverSelect(QueryParam queryParam, boolean isOver) { - String aggStr = queryParam.getAggregators().stream().map(f -> { + public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) { + String aggStr = structQueryParam.getAggregators().stream().map(f -> { if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s", @@ -171,43 +128,44 @@ public class CalculateAggConverter implements QueryConverter { return f.getColumn(); } }).collect(Collectors.joining(",")); - return CollectionUtils.isEmpty(queryParam.getGroups()) ? 
aggStr - : String.join(",", queryParam.getGroups()) + "," + aggStr; + return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr + : String.join(",", structQueryParam.getGroups()) + "," + aggStr; } - public String getTimeSpan(QueryParam queryParam, boolean isOver, boolean isAdd) { - if (Objects.nonNull(queryParam.getDateInfo())) { + public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver, + boolean isAdd) { + if (Objects.nonNull(structQueryParam.getDateInfo())) { String addStr = isAdd ? "" : "-"; - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { return "day," + (isOver ? addStr + "7" : addStr + "1"); } - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { return isOver ? "month," + addStr + "1" : "day," + addStr + "7"; } - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) { return isOver ? "year," + addStr + "1" : "month," + addStr + "1"; } } return ""; } - public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, + public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, String aliasRight) { - String timeDim = getTimeDim(queryParam); - String timeSpan = getTimeSpan(queryParam, isOver, true); - String aggStr = queryParam.getAggregators().stream().map(f -> { + String timeDim = getTimeDim(structQueryParam); + String timeSpan = getTimeSpan(structQueryParam, isOver, true); + String aggStr = structQueryParam.getAggregators().stream().map(f -> { if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { return String.format( "%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ", aliasRight + timeDim, aliasLeft + timeDim, timeSpan, aliasRight + timeDim); } - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) && isOver) { return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ", - getTimeSpan(queryParam, isOver, false), aliasLeft + timeDim, + getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim, aliasRight + timeDim); } return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan, @@ -217,7 +175,7 @@ public class CalculateAggConverter implements QueryConverter { } }).collect(Collectors.joining(" and ")); List groups = new ArrayList<>(); - for (String group : queryParam.getGroups()) { + for (String group : structQueryParam.getGroups()) { if (group.equalsIgnoreCase(timeDim)) { continue; } @@ -228,35 +186,36 @@ public class CalculateAggConverter implements QueryConverter { } @Override - public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) { + public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, + String metricSql) { String sql = String.format( "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", - getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."), - getAllJoinSelect(queryParam, "t1."), metricSql, metricSql, - 
getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), - getLimit(queryParam)); + getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."), + getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, + getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), + getLimit(structQueryParam)); return sql; } } public class CkEngineSql extends MysqlEngineSql { - public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, + public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, String aliasRight) { - String timeDim = getTimeDim(queryParam); - String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true); - String aggStr = queryParam.getAggregators().stream().map(f -> { + String timeDim = getTimeDim(structQueryParam); + String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true); + String aggStr = structQueryParam.getAggregators().stream().map(f -> { if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { return String.format( "toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ", aliasLeft + timeDim, aliasRight + timeDim, timeSpan); } - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) && isOver) { return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s", - aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false), + aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false), aliasRight + timeDim); } return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, @@ -266,7 +225,7 @@ public class CalculateAggConverter implements QueryConverter { } }).collect(Collectors.joining(" and ")); List groups = new ArrayList<>(); - for (String group : queryParam.getGroups()) { + for (String group : structQueryParam.getGroups()) { if (group.equalsIgnoreCase(timeDim)) { continue; } @@ -277,45 +236,49 @@ public class CalculateAggConverter implements QueryConverter { } @Override - public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) { + public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, + String metricSql) { if (!asWith) { return String.format( "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", - getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."), - getAllJoinSelect(queryParam, "t1."), metricSql, metricSql, - getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), - getLimit(queryParam)); + getOverSelect(structQueryParam, isOver), + getAllSelect(structQueryParam, "t0."), + getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, + getJoinOn(structQueryParam, isOver, "t0.", "t1."), + getOrderBy(structQueryParam), getLimit(structQueryParam)); } return String.format( ",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s " + "from t0 left join t1 on %s ) metric_tb_src %s %s ", - metricSql, metricSql, getOverSelect(queryParam, isOver), - getAllSelect(queryParam, "t0."), getAllJoinSelect(queryParam, "t1."), - getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), - getLimit(queryParam)); + metricSql, metricSql, getOverSelect(structQueryParam, isOver), + 
getAllSelect(structQueryParam, "t0."), + getAllJoinSelect(structQueryParam, "t1."), + getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), + getLimit(structQueryParam)); } } public class MysqlEngineSql implements EngineSql { - public String getTimeSpan(QueryParam queryParam, boolean isOver, boolean isAdd) { - if (Objects.nonNull(queryParam.getDateInfo())) { + public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver, + boolean isAdd) { + if (Objects.nonNull(structQueryParam.getDateInfo())) { String addStr = isAdd ? "" : "-"; - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { return isOver ? addStr + "7 day" : addStr + "1 day"; } - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) { return isOver ? addStr + "1 month" : addStr + "7 day"; } - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { return isOver ? addStr + "1 year" : addStr + "1 month"; } } return ""; } - public String getOverSelect(QueryParam queryParam, boolean isOver) { - String aggStr = queryParam.getAggregators().stream().map(f -> { + public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) { + String aggStr = structQueryParam.getAggregators().stream().map(f -> { if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s", @@ -325,26 +288,26 @@ public class CalculateAggConverter implements QueryConverter { return f.getColumn(); } }).collect(Collectors.joining(",")); - return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr - : String.join(",", queryParam.getGroups()) + "," + aggStr; + return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? 
aggStr + : String.join(",", structQueryParam.getGroups()) + "," + aggStr; } - public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, + public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, String aliasRight) { - String timeDim = getTimeDim(queryParam); - String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true); - String aggStr = queryParam.getAggregators().stream().map(f -> { + String timeDim = getTimeDim(structQueryParam); + String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true); + String aggStr = structQueryParam.getAggregators().stream().map(f -> { if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { return String.format( "%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ", aliasLeft + timeDim, aliasRight + timeDim, timeSpan); } - if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) + if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) && isOver) { return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s", - aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false), + aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false), aliasRight + timeDim); } return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, @@ -354,7 +317,7 @@ public class CalculateAggConverter implements QueryConverter { } }).collect(Collectors.joining(" and ")); List groups = new ArrayList<>(); - for (String group : queryParam.getGroups()) { + for (String group : structQueryParam.getGroups()) { if (group.equalsIgnoreCase(timeDim)) { continue; } @@ -365,51 +328,53 @@ public class CalculateAggConverter implements QueryConverter { } @Override - public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) { + public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, + String metricSql) { String sql = String.format( "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", - getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."), - getAllJoinSelect(queryParam, "t1."), metricSql, metricSql, - getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), - getLimit(queryParam)); + getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."), + getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, + getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), + getLimit(structQueryParam)); return sql; } } - private String getAllJoinSelect(QueryParam queryParam, String alias) { - String aggStr = queryParam.getAggregators().stream() + private String getAllJoinSelect(StructQueryParam structQueryParam, String alias) { + String aggStr = structQueryParam.getAggregators().stream() .map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll") .collect(Collectors.joining(",")); List groups = new ArrayList<>(); - for (String group : queryParam.getGroups()) { + for (String group : structQueryParam.getGroups()) { groups.add(alias + group + " as " + group + "_roll"); } return CollectionUtils.isEmpty(groups) ? 
aggStr : String.join(",", groups) + "," + aggStr; } - private String getGroupDimWithOutTime(QueryParam queryParam) { - String timeDim = getTimeDim(queryParam); - return queryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim)) + private String getGroupDimWithOutTime(StructQueryParam structQueryParam) { + String timeDim = getTimeDim(structQueryParam); + return structQueryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim)) .collect(Collectors.joining(",")); } - private static String getTimeDim(QueryParam queryParam) { + private static String getTimeDim(StructQueryParam structQueryParam) { DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class); - return dateModeUtils.getSysDateCol(queryParam.getDateInfo()); + return dateModeUtils.getSysDateCol(structQueryParam.getDateInfo()); } - private static String getLimit(QueryParam queryParam) { - if (queryParam != null && queryParam.getLimit() != null && queryParam.getLimit() > 0) { - return " limit " + String.valueOf(queryParam.getLimit()); + private static String getLimit(StructQueryParam structQueryParam) { + if (structQueryParam != null && structQueryParam.getLimit() != null + && structQueryParam.getLimit() > 0) { + return " limit " + String.valueOf(structQueryParam.getLimit()); } return ""; } - private String getAllSelect(QueryParam queryParam, String alias) { - String aggStr = queryParam.getAggregators().stream().map(f -> getSelectField(f, alias)) - .collect(Collectors.joining(",")); - return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr - : alias + String.join("," + alias, queryParam.getGroups()) + "," + aggStr; + private String getAllSelect(StructQueryParam structQueryParam, String alias) { + String aggStr = structQueryParam.getAggregators().stream() + .map(f -> getSelectField(f, alias)).collect(Collectors.joining(",")); + return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? 
aggStr + : alias + String.join("," + alias, structQueryParam.getGroups()) + "," + aggStr; } private String getSelectField(final Aggregator agg, String alias) { @@ -421,32 +386,32 @@ public class CalculateAggConverter implements QueryConverter { return sqlGenerateUtils.getSelectField(agg); } - private String getGroupBy(QueryParam queryParam) { - if (CollectionUtils.isEmpty(queryParam.getGroups())) { + private String getGroupBy(StructQueryParam structQueryParam) { + if (CollectionUtils.isEmpty(structQueryParam.getGroups())) { return ""; } - return "group by " + String.join(",", queryParam.getGroups()); + return "group by " + String.join(",", structQueryParam.getGroups()); } - private static String getOrderBy(QueryParam queryParam) { - return "order by " + getTimeDim(queryParam) + " desc"; + private static String getOrderBy(StructQueryParam structQueryParam) { + return "order by " + getTimeDim(structQueryParam) + " desc"; } - private boolean isOverRatio(QueryParam queryParam) { - Long overCt = queryParam.getAggregators().stream() + private boolean isOverRatio(StructQueryParam structQueryParam) { + Long overCt = structQueryParam.getAggregators().stream() .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); return overCt > 0; } - private void check(QueryParam queryParam) throws Exception { - Long ratioOverNum = queryParam.getAggregators().stream() + private void check(StructQueryParam structQueryParam) throws Exception { + Long ratioOverNum = structQueryParam.getAggregators().stream() .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); - Long ratioRollNum = queryParam.getAggregators().stream() + Long ratioRollNum = structQueryParam.getAggregators().stream() .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count(); if (ratioOverNum > 0 && ratioRollNum > 0) { throw new Exception("not support over ratio and roll ratio together "); } - if (getTimeDim(queryParam).isEmpty()) { + if (getTimeDim(structQueryParam).isEmpty()) { throw new Exception("miss time filter"); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java deleted file mode 100644 index 12c8dd722..000000000 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/ParserDefaultConverter.java +++ /dev/null @@ -1,74 +0,0 @@ -package com.tencent.supersonic.headless.core.translator.converter; - -import com.tencent.supersonic.common.pojo.ColumnOrder; -import com.tencent.supersonic.common.util.ContextUtils; -import com.tencent.supersonic.headless.api.pojo.QueryParam; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; -import com.tencent.supersonic.headless.core.pojo.QueryStatement; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; -import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.BeanUtils; -import org.springframework.stereotype.Component; -import org.springframework.util.CollectionUtils; - -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -/** QueryConverter default implement */ -@Component("ParserDefaultConverter") -@Slf4j -public class ParserDefaultConverter implements QueryConverter { - - @Override - public boolean accept(QueryStatement queryStatement) { - if 
(Objects.isNull(queryStatement.getQueryParam()) || queryStatement.getIsS2SQL()) { - return false; - } - CalculateAggConverter calculateConverterAgg = - ContextUtils.getBean(CalculateAggConverter.class); - return !calculateConverterAgg.accept(queryStatement); - } - - @Override - public void convert(QueryStatement queryStatement) throws Exception { - SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); - QueryParam queryParam = queryStatement.getQueryParam(); - MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam(); - MetricQueryParam metricReq = - generateSqlCommand(queryStatement.getQueryParam(), queryStatement); - queryStatement.setMinMaxTime(sqlGenerateUtils.getBeginEndTime(queryParam, null)); - BeanUtils.copyProperties(metricReq, metricQueryParam); - } - - public MetricQueryParam generateSqlCommand(QueryParam queryParam, - QueryStatement queryStatement) { - SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); - MetricQueryParam metricQueryParam = new MetricQueryParam(); - metricQueryParam.setMetrics(queryParam.getMetrics()); - metricQueryParam.setDimensions(queryParam.getGroups()); - String where = sqlGenerateUtils.generateWhere(queryParam, null); - log.info("in generateSqlCommend, complete where:{}", where); - - metricQueryParam.setWhere(where); - metricQueryParam.setOrder(queryParam.getOrders().stream() - .map(order -> new ColumnOrder(order.getColumn(), order.getDirection())) - .collect(Collectors.toList())); - metricQueryParam.setLimit(queryParam.getLimit()); - - // support detail query - if (queryParam.getQueryType().isNativeAggQuery() - && CollectionUtils.isEmpty(metricQueryParam.getMetrics())) { - Map modelMap = queryStatement.getOntology().getModelMap(); - for (Long modelId : modelMap.keySet()) { - String modelBizName = modelMap.get(modelId).getName(); - String internalMetricName = - sqlGenerateUtils.generateInternalMetricName(modelBizName); - metricQueryParam.getMetrics().add(internalMetricName); - } - } - - return metricQueryParam; - } -} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java new file mode 100644 index 000000000..1b2e55362 --- /dev/null +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java @@ -0,0 +1,308 @@ +package com.tencent.supersonic.headless.core.translator.converter; + +import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper; +import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper; +import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; +import com.tencent.supersonic.common.pojo.Constants; +import com.tencent.supersonic.common.pojo.enums.EngineType; +import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; +import com.tencent.supersonic.common.util.ContextUtils; +import com.tencent.supersonic.headless.api.pojo.Measure; +import com.tencent.supersonic.headless.api.pojo.SchemaItem; +import com.tencent.supersonic.headless.api.pojo.enums.AggOption; +import com.tencent.supersonic.headless.api.pojo.enums.MetricType; +import com.tencent.supersonic.headless.api.pojo.response.*; +import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor; +import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory; +import com.tencent.supersonic.headless.core.pojo.QueryStatement; +import 
com.tencent.supersonic.headless.core.pojo.SqlQueryParam; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; +import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.springframework.stereotype.Component; +import org.springframework.util.CollectionUtils; + +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +@Component("SqlQueryConverter") +@Slf4j +public class SqlQueryConverter implements QueryConverter { + + @Override + public boolean accept(QueryStatement queryStatement) { + if (Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL()) { + return true; + } + return false; + } + + @Override + public void convert(QueryStatement queryStatement) throws Exception { + SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); + convertNameToBizName(queryStatement); + rewriteFunction(queryStatement); + String reqSql = queryStatement.getSqlQueryParam().getSql(); + String tableName = SqlSelectHelper.getTableName(reqSql); + if (StringUtils.isEmpty(tableName)) { + return; + } + + // replace order by field with the select sequence number + queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderbyField(reqSql)); + log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql()); + + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + // fill dataSetQuery + SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); + sqlQueryParam.setTable(tableName.toLowerCase()); + if (!sqlGenerateUtils.isSupportWith( + EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()), + semanticSchemaResp.getDatabaseResp().getVersion())) { + sqlQueryParam.setSupportWith(false); + sqlQueryParam.setWithAlias(false); + } + + // build ontologyQuery + List allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql()); + List metricSchemas = getMetrics(semanticSchemaResp, allFields); + List metrics = + metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList()); + AggOption aggOption = getAggOption(queryStatement, metricSchemas); + Set dimensions = getDimensions(semanticSchemaResp, allFields); + OntologyQueryParam ontologyQueryParam = new OntologyQueryParam(); + ontologyQueryParam.getMetrics().addAll(metrics); + ontologyQueryParam.getDimensions().addAll(dimensions); + ontologyQueryParam.setAggOption(aggOption); + ontologyQueryParam.setNativeQuery(!AggOption.isAgg(aggOption)); + + log.info("parse sqlQuery [{}] ", sqlQueryParam); + queryStatement.setOntologyQueryParam(ontologyQueryParam); + queryStatement.setSql(sqlQueryParam.getSql()); + generateDerivedMetric(sqlGenerateUtils, queryStatement); + } + + private AggOption getAggOption(QueryStatement queryStatement, + List metricSchemas) { + String sql = queryStatement.getSql(); + if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) { + return AggOption.AGGREGATION; + } + + if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql) + && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) { + log.debug("getAggOption simple sql set to DEFAULT"); + return AggOption.DEFAULT; + } + + // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE" + // if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE" + if 
(!SqlSelectFunctionHelper.hasAggregateFunction(sql) + || SqlSelectFunctionHelper.hasFunction(sql, "count") + || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) { + return AggOption.OUTER; + } + + if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql) + || SqlSelectHelper.hasGroupBy(sql)) { + return AggOption.OUTER; + } + long defaultAggNullCnt = metricSchemas.stream().filter( + m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg())) + .count(); + if (defaultAggNullCnt > 0) { + log.debug("getAggOption find null defaultAgg metric set to NATIVE"); + return AggOption.OUTER; + } + return AggOption.DEFAULT; + } + + private Set getDimensions(SemanticSchemaResp semanticSchemaResp, + List allFields) { + Map dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream() + .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(), + SchemaItem::getBizName, (k1, k2) -> k1)); + dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(), + TimeDimensionEnum.DAY.getName()); + return allFields.stream() + .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase())) + .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase())) + .collect(Collectors.toSet()); + } + + private List getMetrics(SemanticSchemaResp semanticSchemaResp, + List allFields) { + Map metricLowerToNameMap = + semanticSchemaResp.getMetrics().stream().collect(Collectors + .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry)); + return allFields.stream() + .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase())) + .map(entry -> metricLowerToNameMap.get(entry.toLowerCase())) + .collect(Collectors.toList()); + } + + + private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils, + QueryStatement queryStatement) { + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + SqlQueryParam dsParam = queryStatement.getSqlQueryParam(); + OntologyQueryParam ontology = queryStatement.getOntologyQueryParam(); + String sql = dsParam.getSql(); + + Set measures = new HashSet<>(); + Map replaces = generateDerivedMetric(sqlGenerateUtils, semanticSchemaResp, + ontology.getAggOption(), ontology.getMetrics(), ontology.getDimensions(), measures); + + if (!CollectionUtils.isEmpty(replaces)) { + // metricTable sql use measures replace metric + sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces); + ontology.setAggOption(AggOption.NATIVE); + // metricTable use measures replace metric + if (!CollectionUtils.isEmpty(measures)) { + ontology.getMetrics().addAll(measures); + } else { + // empty measure , fill default + ontology.setMetrics(new ArrayList<>()); + ontology.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( + getDefaultModel(semanticSchemaResp, ontology.getDimensions()))); + } + } + + dsParam.setSql(sql); + } + + private Map generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils, + SemanticSchemaResp semanticSchemaResp, AggOption aggOption, List metrics, + List dimensions, Set measures) { + Map result = new HashMap<>(); + List metricResps = semanticSchemaResp.getMetrics(); + List dimensionResps = semanticSchemaResp.getDimensions(); + + // Check if any metric is derived + boolean hasDerivedMetrics = + metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType + .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams())); + if (!hasDerivedMetrics) { + return result; + } + + log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics); + + Set 
allFields = new HashSet<>(); + Map allMeasures = new HashMap<>(); + semanticSchemaResp.getModelResps().forEach(modelResp -> { + allFields.addAll(modelResp.getFieldList()); + if (modelResp.getModelDetail().getMeasures() != null) { + modelResp.getModelDetail().getMeasures() + .forEach(measure -> allMeasures.put(measure.getBizName(), measure)); + } + }); + + Set derivedDimensions = new HashSet<>(); + Set derivedMetrics = new HashSet<>(); + Map visitedMetrics = new HashMap<>(); + + for (MetricResp metricResp : metricResps) { + if (metrics.contains(metricResp.getBizName())) { + boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(), + metricResp.getMetricDefineByMeasureParams()); + if (isDerived) { + String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields, + allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp), + metricResp.getMetricDefineType(), aggOption, visitedMetrics, + derivedMetrics, derivedDimensions); + result.put(metricResp.getBizName(), expr); + log.debug("derived metric {}->{}", metricResp.getBizName(), expr); + } else { + measures.add(metricResp.getBizName()); + } + } + } + + measures.addAll(derivedMetrics); + derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension)) + .forEach(dimensions::add); + + return result; + } + + + private void convertNameToBizName(QueryStatement queryStatement) { + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + Map fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp); + String sql = queryStatement.getSqlQueryParam().getSql(); + log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(), + sql); + sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true); + log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(), + sql); + sql = SqlReplaceHelper.replaceTable(sql, + Constants.TABLE_PREFIX + queryStatement.getDataSetId()); + log.debug("replaceTableName after:{}", sql); + queryStatement.getSqlQueryParam().setSql(sql); + } + + + private void rewriteFunction(QueryStatement queryStatement) { + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + DatabaseResp database = semanticSchemaResp.getDatabaseResp(); + String sql = queryStatement.getSqlQueryParam().getSql(); + if (Objects.isNull(database) || Objects.isNull(database.getType())) { + return; + } + String type = database.getType(); + DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase()); + if (Objects.nonNull(engineAdaptor)) { + String functionNameCorrector = engineAdaptor.functionNameCorrector(sql); + queryStatement.getSqlQueryParam().setSql(functionNameCorrector); + } + } + + + protected Map getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) { + // support fieldName and field alias to bizName + Map dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap( + entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); + + Map metricResults = semanticSchemaResp.getMetrics().stream().flatMap( + entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); + + dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap()); + dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap()); + dimensionResults.putAll(metricResults); + return 
dimensionResults; + } + + private Stream> getPairStream(String aliasStr, String name, + String bizName) { + Set> elements = new HashSet<>(); + elements.add(Pair.of(name, bizName)); + if (StringUtils.isNotBlank(aliasStr)) { + List aliasList = SchemaItem.getAliasList(aliasStr); + for (String alias : aliasList) { + elements.add(Pair.of(alias, bizName)); + } + } + return elements.stream(); + } + + private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List dimensions) { + if (!CollectionUtils.isEmpty(dimensions)) { + Map modelMatchCnt = new HashMap<>(); + for (ModelResp modelResp : semanticSchemaResp.getModelResps()) { + modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions() + .stream().filter(d -> dimensions.contains(d.getBizName())).count()); + } + return modelMatchCnt.entrySet().stream() + .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) + .map(m -> m.getKey()).findFirst().orElse(""); + } + return semanticSchemaResp.getModelResps().get(0).getBizName(); + } + +} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableConverter.java similarity index 86% rename from headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java rename to headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableConverter.java index e2c5f9a06..299ea4609 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableParseConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableConverter.java @@ -14,12 +14,12 @@ import java.util.List; import java.util.Objects; @Slf4j -@Component("SqlVariableParseConverter") -public class SqlVariableParseConverter implements QueryConverter { +@Component("SqlVariableConverter") +public class SqlVariableConverter implements QueryConverter { @Override public boolean accept(QueryStatement queryStatement) { - if (Objects.isNull(queryStatement.getQueryParam())) { + if (Objects.isNull(queryStatement.getStructQueryParam()) && queryStatement.getIsS2SQL()) { return false; } return true; @@ -38,7 +38,7 @@ public class SqlVariableParseConverter implements QueryConverter { String sqlParsed = SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), modelResp.getModelDetail().getSqlVariables(), - queryStatement.getQueryParam().getParams()); + queryStatement.getStructQueryParam().getParams()); DataModel dataModel = queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName()); dataModel.setSqlQuery(sqlParsed); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java new file mode 100644 index 000000000..38aa00794 --- /dev/null +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java @@ -0,0 +1,74 @@ +package com.tencent.supersonic.headless.core.translator.converter; + +import com.tencent.supersonic.common.pojo.ColumnOrder; +import com.tencent.supersonic.common.pojo.enums.EngineType; +import com.tencent.supersonic.common.util.ContextUtils; +import com.tencent.supersonic.headless.api.pojo.enums.AggOption; 
+import com.tencent.supersonic.headless.core.pojo.Database; +import com.tencent.supersonic.headless.core.pojo.QueryStatement; +import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; +import com.tencent.supersonic.headless.core.pojo.StructQueryParam; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; +import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +import java.util.Objects; +import java.util.stream.Collectors; + +@Component("ParserDefaultConverter") +@Slf4j +public class StructQueryConverter implements QueryConverter { + + @Override + public boolean accept(QueryStatement queryStatement) { + if (Objects.nonNull(queryStatement.getStructQueryParam()) && !queryStatement.getIsS2SQL()) { + return true; + } + + return false; + } + + @Override + public void convert(QueryStatement queryStatement) throws Exception { + SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); + StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); + + String dsTable = "t_1"; + SqlQueryParam sqlParam = new SqlQueryParam(); + sqlParam.setTable(dsTable); + String sql = String.format("select %s from %s %s %s %s", + sqlGenerateUtils.getSelect(structQueryParam), dsTable, + sqlGenerateUtils.getGroupBy(structQueryParam), + sqlGenerateUtils.getOrderBy(structQueryParam), + sqlGenerateUtils.getLimit(structQueryParam)); + Database database = queryStatement.getOntology().getDatabase(); + EngineType engineType = EngineType.fromString(database.getType().toUpperCase()); + if (!sqlGenerateUtils.isSupportWith(engineType, database.getVersion())) { + sqlParam.setSupportWith(false); + sql = String.format("select %s from %s t0 %s %s %s", + sqlGenerateUtils.getSelect(structQueryParam), dsTable, + sqlGenerateUtils.getGroupBy(structQueryParam), + sqlGenerateUtils.getOrderBy(structQueryParam), + sqlGenerateUtils.getLimit(structQueryParam)); + } + sqlParam.setSql(sql); + queryStatement.setSqlQueryParam(sqlParam); + + OntologyQueryParam ontologyQueryParam = new OntologyQueryParam(); + ontologyQueryParam.getDimensions().addAll(structQueryParam.getGroups()); + ontologyQueryParam.getMetrics().addAll(structQueryParam.getAggregators().stream() + .map(a -> a.getColumn()).collect(Collectors.toList())); + String where = sqlGenerateUtils.generateWhere(structQueryParam, null); + ontologyQueryParam.setWhere(where); + ontologyQueryParam.setAggOption(AggOption.AGGREGATION); + ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery()); + ontologyQueryParam.setOrder(structQueryParam.getOrders().stream() + .map(order -> new ColumnOrder(order.getColumn(), order.getDirection())) + .collect(Collectors.toList())); + ontologyQueryParam.setLimit(structQueryParam.getLimit()); + queryStatement.setOntologyQueryParam(ontologyQueryParam); + log.info("parse structQuery [{}] ", queryStatement.getSqlQueryParam()); + } + +} diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlGenerateUtils.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlGenerateUtils.java index 60f4097d8..8adf84d3e 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlGenerateUtils.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlGenerateUtils.java @@ -12,7 +12,6 @@ import com.tencent.supersonic.common.util.DateModeUtils; import 
com.tencent.supersonic.common.util.SqlFilterUtils; import com.tencent.supersonic.common.util.StringUtil; import com.tencent.supersonic.headless.api.pojo.Measure; -import com.tencent.supersonic.headless.api.pojo.QueryParam; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType; import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; @@ -20,6 +19,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.MetricResp; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; import com.tencent.supersonic.headless.core.config.ExecutorConfig; +import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.ImmutablePair; @@ -85,25 +85,26 @@ public class SqlGenerateUtils { return selectSql; } - public String getLimit(QueryParam queryParam) { - if (queryParam != null && queryParam.getLimit() != null && queryParam.getLimit() > 0) { - return " limit " + queryParam.getLimit(); + public String getLimit(StructQueryParam structQueryParam) { + if (structQueryParam != null && structQueryParam.getLimit() != null + && structQueryParam.getLimit() > 0) { + return " limit " + structQueryParam.getLimit(); } return ""; } - public String getSelect(QueryParam queryParam) { - String aggStr = queryParam.getAggregators().stream().map(this::getSelectField) + public String getSelect(StructQueryParam structQueryParam) { + String aggStr = structQueryParam.getAggregators().stream().map(this::getSelectField) .collect(Collectors.joining(",")); - return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr - : String.join(",", queryParam.getGroups()) + "," + aggStr; + return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr + : String.join(",", structQueryParam.getGroups()) + "," + aggStr; } - public String getSelect(QueryParam queryParam, Map deriveMetrics) { - String aggStr = queryParam.getAggregators().stream() + public String getSelect(StructQueryParam structQueryParam, Map deriveMetrics) { + String aggStr = structQueryParam.getAggregators().stream() .map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(",")); - return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr - : String.join(",", queryParam.getGroups()) + "," + aggStr; + return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? 
aggStr + : String.join(",", structQueryParam.getGroups()) + "," + aggStr; } public String getSelectField(final Aggregator agg) { @@ -128,46 +129,46 @@ public class SqlGenerateUtils { return deriveMetrics.get(agg.getColumn()); } - public String getGroupBy(QueryParam queryParam) { - if (CollectionUtils.isEmpty(queryParam.getGroups())) { + public String getGroupBy(StructQueryParam structQueryParam) { + if (CollectionUtils.isEmpty(structQueryParam.getGroups())) { return ""; } - return "group by " + String.join(",", queryParam.getGroups()); + return "group by " + String.join(",", structQueryParam.getGroups()); } - public String getOrderBy(QueryParam queryParam) { - if (CollectionUtils.isEmpty(queryParam.getOrders())) { + public String getOrderBy(StructQueryParam structQueryParam) { + if (CollectionUtils.isEmpty(structQueryParam.getOrders())) { return ""; } - return "order by " + queryParam.getOrders().stream() + return "order by " + structQueryParam.getOrders().stream() .map(order -> " " + order.getColumn() + " " + order.getDirection() + " ") .collect(Collectors.joining(",")); } - public String getOrderBy(QueryParam queryParam, Map deriveMetrics) { - if (CollectionUtils.isEmpty(queryParam.getOrders())) { + public String getOrderBy(StructQueryParam structQueryParam, Map deriveMetrics) { + if (CollectionUtils.isEmpty(structQueryParam.getOrders())) { return ""; } - if (!queryParam.getOrders().stream() + if (!structQueryParam.getOrders().stream() .anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) { - return getOrderBy(queryParam); + return getOrderBy(structQueryParam); } - return "order by " + queryParam.getOrders().stream() + return "order by " + structQueryParam.getOrders().stream() .map(order -> " " + (deriveMetrics.containsKey(order.getColumn()) ? 
deriveMetrics.get(order.getColumn()) : order.getColumn()) + " " + order.getDirection() + " ") .collect(Collectors.joining(",")); } - public String generateWhere(QueryParam queryParam, ItemDateResp itemDateResp) { + public String generateWhere(StructQueryParam structQueryParam, ItemDateResp itemDateResp) { String whereClauseFromFilter = - sqlFilterUtils.getWhereClause(queryParam.getDimensionFilters()); - String whereFromDate = getDateWhereClause(queryParam.getDateInfo(), itemDateResp); - return mergeDateWhereClause(queryParam, whereClauseFromFilter, whereFromDate); + sqlFilterUtils.getWhereClause(structQueryParam.getDimensionFilters()); + String whereFromDate = getDateWhereClause(structQueryParam.getDateInfo(), itemDateResp); + return mergeDateWhereClause(structQueryParam, whereClauseFromFilter, whereFromDate); } - private String mergeDateWhereClause(QueryParam queryParam, String whereClauseFromFilter, - String whereFromDate) { + private String mergeDateWhereClause(StructQueryParam structQueryParam, + String whereClauseFromFilter, String whereFromDate) { if (StringUtils.isNotEmpty(whereFromDate) && StringUtils.isNotEmpty(whereClauseFromFilter)) { return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter); @@ -179,7 +180,7 @@ public class SqlGenerateUtils { return whereFromDate; } else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) { log.debug("the current date information is empty, enter the date initialization logic"); - return dateModeUtils.defaultRecentDateInfo(queryParam.getDateInfo()); + return dateModeUtils.defaultRecentDateInfo(structQueryParam.getDateInfo()); } return whereClauseFromFilter; } @@ -203,12 +204,12 @@ public class SqlGenerateUtils { return dateModeUtils.getDateWhereStr(dateInfo, dateDate); } - public Triple getBeginEndTime(QueryParam queryParam, + public Triple getBeginEndTime(StructQueryParam structQueryParam, ItemDateResp dataDate) { - if (Objects.isNull(queryParam.getDateInfo())) { + if (Objects.isNull(structQueryParam.getDateInfo())) { return Triple.of("", "", ""); } - DateConf dateConf = queryParam.getDateInfo(); + DateConf dateConf = structQueryParam.getDateInfo(); String dateInfo = dateModeUtils.getSysDateCol(dateConf); if (dateInfo.isEmpty()) { return Triple.of("", "", ""); diff --git a/headless/core/src/test/java/com/tencent/supersonic/chat/core/parser/aggregate/CalciteSqlParserTest.java b/headless/core/src/test/java/com/tencent/supersonic/chat/core/parser/aggregate/CalciteSqlParserTest.java index 5a3124bf8..72fa452c9 100644 --- a/headless/core/src/test/java/com/tencent/supersonic/chat/core/parser/aggregate/CalciteSqlParserTest.java +++ b/headless/core/src/test/java/com/tencent/supersonic/chat/core/parser/aggregate/CalciteSqlParserTest.java @@ -1,7 +1,6 @@ package com.tencent.supersonic.chat.core.parser.aggregate; import com.alibaba.fastjson.JSON; -import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.translator.calcite.CalciteQueryParser; import org.junit.jupiter.api.Test; @@ -318,7 +317,7 @@ public class CalciteSqlParserTest { + " \"updatedAt\": 1711367511146\n" + " }\n" + " }\n" + "}"; QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class); CalciteQueryParser calciteSqlParser = new CalciteQueryParser(); - calciteSqlParser.parse(queryStatement, AggOption.DEFAULT); + calciteSqlParser.parse(queryStatement); 
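(For reference: a minimal sketch of how the refactored SqlGenerateUtils clause builders compose into a struct-query SELECT. The method names and signatures match the hunks above; the helper method itself, its parameters, and any surrounding class are assumptions rather than code from this patch.)

    // Sketch only: stitches together the StructQueryParam-based builders shown above.
    // 'utils', 'param', 'itemDateResp' and 'tableName' are assumed inputs.
    private String buildStructSql(SqlGenerateUtils utils, StructQueryParam param,
            ItemDateResp itemDateResp, String tableName) {
        StringBuilder sql = new StringBuilder("select ")
                .append(utils.getSelect(param))                   // groups + aggregators
                .append(" from ").append(tableName);
        String where = utils.generateWhere(param, itemDateResp);  // filters + date range
        if (org.apache.commons.lang3.StringUtils.isNotBlank(where)) {
            sql.append(" where ").append(where);
        }
        return sql.append(" ").append(utils.getGroupBy(param))    // "group by ..." or ""
                .append(" ").append(utils.getOrderBy(param))      // "order by ..." or ""
                .append(utils.getLimit(param))                    // " limit n" or ""
                .toString().trim();
    }

Each builder above already returns an empty string when its clause does not apply, which is why they can be appended unconditionally in such a composition.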
Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""), "SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`" + "FROM" + "`s2_pv_uv_statis`" + "GROUPBY`imp_date`,`imp_date`"); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java index c26b66c44..306ac693a 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/service/impl/S2SemanticLayerService.java @@ -6,24 +6,10 @@ import com.tencent.supersonic.common.pojo.QueryColumn; import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum; import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; -import com.tencent.supersonic.headless.api.pojo.DataSetSchema; -import com.tencent.supersonic.headless.api.pojo.Dim; -import com.tencent.supersonic.headless.api.pojo.MetaFilter; -import com.tencent.supersonic.headless.api.pojo.QueryParam; +import com.tencent.supersonic.headless.api.pojo.*; import com.tencent.supersonic.headless.api.pojo.enums.SemanticType; -import com.tencent.supersonic.headless.api.pojo.request.DimensionValueReq; -import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq; -import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq; -import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; -import com.tencent.supersonic.headless.api.pojo.request.SchemaFilterReq; -import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq; -import com.tencent.supersonic.headless.api.pojo.response.DimensionResp; -import com.tencent.supersonic.headless.api.pojo.response.ItemResp; -import com.tencent.supersonic.headless.api.pojo.response.MetricResp; -import com.tencent.supersonic.headless.api.pojo.response.ModelResp; -import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp; -import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; -import com.tencent.supersonic.headless.api.pojo.response.SemanticTranslateResp; +import com.tencent.supersonic.headless.api.pojo.request.*; +import com.tencent.supersonic.headless.api.pojo.response.*; import com.tencent.supersonic.headless.chat.knowledge.HanlpMapResult; import com.tencent.supersonic.headless.chat.knowledge.KnowledgeBaseService; import com.tencent.supersonic.headless.chat.knowledge.MapResult; @@ -33,6 +19,8 @@ import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper; import com.tencent.supersonic.headless.core.cache.QueryCache; import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.pojo.QueryStatement; +import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; +import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.translator.SemanticTranslator; import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.server.annotation.S2DataPermission; @@ -52,12 +40,7 @@ import org.apache.commons.lang3.StringUtils; import org.springframework.beans.BeanUtils; import org.springframework.stereotype.Service; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; 
-import java.util.Set; +import java.util.*; import java.util.stream.Collectors; @Service @@ -307,30 +290,13 @@ public class S2SemanticLayerService implements SemanticLayerService { return queryStatement; } - private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) { - // If dataSetId or DataSetName is empty, parse dataSetId from the SQL - if (querySqlReq.needGetDataSetId()) { - Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user); - querySqlReq.setDataSetId(dataSetId); - } - - QueryStatement queryStatement = buildStructQueryStatement(querySqlReq); - queryStatement.setIsS2SQL(true); - queryStatement.setSql(querySqlReq.getSql()); - return queryStatement; - } - - private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) { + private QueryStatement buildQueryStatement(SemanticQueryReq queryReq) { SchemaFilterReq schemaFilterReq = new SchemaFilterReq(); schemaFilterReq.setDataSetId(queryReq.getDataSetId()); schemaFilterReq.setModelIds(queryReq.getModelIds()); SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(schemaFilterReq); QueryStatement queryStatement = new QueryStatement(); - QueryParam queryParam = new QueryParam(); - BeanUtils.copyProperties(queryReq, queryParam); - queryStatement.setQueryParam(queryParam); - queryStatement.setModelIds(queryReq.getModelIds()); queryStatement.setEnableOptimize(queryUtils.enableOptimize()); queryStatement.setDataSetId(queryReq.getDataSetId()); queryStatement.setSemanticSchemaResp(semanticSchemaResp); @@ -338,6 +304,31 @@ public class S2SemanticLayerService implements SemanticLayerService { return queryStatement; } + private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) { + QueryStatement queryStatement = buildQueryStatement(querySqlReq); + queryStatement.setIsS2SQL(true); + + SqlQueryParam sqlQueryParam = new SqlQueryParam(); + sqlQueryParam.setSql(querySqlReq.getSql()); + queryStatement.setSqlQueryParam(sqlQueryParam); + + // If dataSetId or DataSetName is empty, parse dataSetId from the SQL + if (querySqlReq.needGetDataSetId()) { + Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user); + querySqlReq.setDataSetId(dataSetId); + } + return queryStatement; + } + + private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) { + QueryStatement queryStatement = buildQueryStatement(queryReq); + StructQueryParam structQueryParam = new StructQueryParam(); + BeanUtils.copyProperties(queryReq, structQueryParam); + queryStatement.setStructQueryParam(structQueryParam); + queryStatement.setIsS2SQL(false); + return queryStatement; + } + private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq) { List queryStatements = new ArrayList<>(); for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) { diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java index 12caf5b77..5c6de4df9 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/MetricDrillDownChecker.java @@ -67,7 +67,8 @@ public class MetricDrillDownChecker { List metricResps = getMetrics(metricFields, semanticSchemaResp); if (!checkDrillDownDimension(dimensionBizName, metricResps, 
semanticSchemaResp)) { DimSchemaResp dimSchemaResp = semanticSchemaResp.getDimension(dimensionBizName); - if (Objects.nonNull(dimSchemaResp) && dimSchemaResp.isPartitionTime()) { + if (Objects.isNull(dimSchemaResp) + || (Objects.nonNull(dimSchemaResp) && dimSchemaResp.isPartitionTime())) { continue; } String errMsg = diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java index 656ccd081..c28e3e1ed 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java @@ -2,20 +2,13 @@ package com.tencent.supersonic.headless.server.calcite; import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp; -import com.tencent.supersonic.headless.core.pojo.MetricQueryParam; import com.tencent.supersonic.headless.core.pojo.QueryStatement; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder; import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; -import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl; +import com.tencent.supersonic.headless.server.pojo.yaml.*; import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; @@ -26,7 +19,7 @@ import java.util.List; class HeadlessParserServiceTest { public static SqlParserResp parser(S2CalciteSchema semanticSchema, - MetricQueryParam metricQueryParam, boolean isAgg) { + OntologyQueryParam ontologyQueryParam, boolean isAgg) { SqlParserResp sqlParser = new SqlParserResp(); try { if (semanticSchema == null) { @@ -35,14 +28,14 @@ class HeadlessParserServiceTest { } SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); QueryStatement queryStatement = new QueryStatement(); - queryStatement.setMetricQueryParam(metricQueryParam); - aggBuilder.build(queryStatement, AggOption.getAggregation(!isAgg)); + queryStatement.setOntologyQueryParam(ontologyQueryParam); + aggBuilder.build(queryStatement); EngineType engineType = EngineType.fromString(semanticSchema.getOntology().getDatabase().getType()); sqlParser.setSql(aggBuilder.getSql(engineType)); } catch (Exception e) { sqlParser.setErrMsg(e.getMessage()); - log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e); + log.error("parser error metricQueryReq[{}] error [{}]", ontologyQueryParam, e); } return sqlParser; } @@ -161,7 +154,7 @@ class HeadlessParserServiceTest { // HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric)); - MetricQueryParam 
metricCommand = new MetricQueryParam(); + OntologyQueryParam metricCommand = new OntologyQueryParam(); metricCommand.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date"))); metricCommand.setMetrics(new ArrayList<>(Arrays.asList("pv"))); metricCommand.setWhere( @@ -174,7 +167,7 @@ class HeadlessParserServiceTest { addDepartment(semanticSchema); - MetricQueryParam metricCommand2 = new MetricQueryParam(); + OntologyQueryParam metricCommand2 = new OntologyQueryParam(); metricCommand2.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date", "user_name__department", "user_name", "user_name__page"))); metricCommand2.setMetrics(new ArrayList<>(Arrays.asList("pv"))); diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/utils/QueryNLReqBuilderTest.java b/headless/server/src/test/java/com/tencent/supersonic/headless/server/utils/QueryNLReqBuilderTest.java index 7cec555f9..63fa85ed9 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/utils/QueryNLReqBuilderTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/utils/QueryNLReqBuilderTest.java @@ -68,8 +68,5 @@ class QueryNLReqBuilderTest { DateModeUtils dateModeUtils = new DateModeUtils(); mockContextUtils.when(() -> ContextUtils.getBean(DateModeUtils.class)) .thenReturn(dateModeUtils); - dateModeUtils.setSysDateCol("sys_imp_date"); - dateModeUtils.setSysDateWeekCol("sys_imp_week"); - dateModeUtils.setSysDateMonthCol("sys_imp_month"); } } diff --git a/launchers/headless/src/main/resources/META-INF/spring.factories b/launchers/headless/src/main/resources/META-INF/spring.factories index 82b9e8397..d91c2e7d3 100644 --- a/launchers/headless/src/main/resources/META-INF/spring.factories +++ b/launchers/headless/src/main/resources/META-INF/spring.factories @@ -26,9 +26,10 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ - com.tencent.supersonic.headless.core.translator.converter.SqlVariableParseConverter,\ - com.tencent.supersonic.headless.core.translator.converter.CalculateAggConverter,\ - com.tencent.supersonic.headless.core.translator.converter.ParserDefaultConverter + com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ + com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\ + com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\ + com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter com.tencent.supersonic.headless.core.translator.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.DetailQueryOptimizer @@ -46,4 +47,5 @@ com.tencent.supersonic.headless.core.cache.QueryCache=\ ### headless-server SPIs com.tencent.supersonic.headless.server.modeller.SemanticModeller=\ - com.tencent.supersonic.headless.server.modeller.RuleSemanticModeller \ No newline at end of file + com.tencent.supersonic.headless.server.modeller.RuleSemanticModeller, \ + com.tencent.supersonic.headless.server.modeller.LLMSemanticModeller \ No newline at end of file diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java index 030eb0ad3..df425a2c9 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java +++ 
b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java @@ -7,26 +7,12 @@ import com.tencent.supersonic.chat.server.agent.Agent; import com.tencent.supersonic.chat.server.agent.AgentToolType; import com.tencent.supersonic.chat.server.agent.DatasetTool; import com.tencent.supersonic.chat.server.agent.ToolConfig; -import com.tencent.supersonic.chat.server.processor.execute.DataInterpretProcessor; import com.tencent.supersonic.common.pojo.ChatApp; import com.tencent.supersonic.common.pojo.JoinCondition; import com.tencent.supersonic.common.pojo.ModelRela; -import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum; -import com.tencent.supersonic.common.pojo.enums.AppModule; -import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; -import com.tencent.supersonic.common.pojo.enums.TimeMode; -import com.tencent.supersonic.common.pojo.enums.TypeEnums; +import com.tencent.supersonic.common.pojo.enums.*; import com.tencent.supersonic.common.util.ChatAppManager; -import com.tencent.supersonic.headless.api.pojo.AggregateTypeDefaultConfig; -import com.tencent.supersonic.headless.api.pojo.DataSetDetail; -import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig; -import com.tencent.supersonic.headless.api.pojo.Dim; -import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams; -import com.tencent.supersonic.headless.api.pojo.Identify; -import com.tencent.supersonic.headless.api.pojo.Measure; -import com.tencent.supersonic.headless.api.pojo.ModelDetail; -import com.tencent.supersonic.headless.api.pojo.QueryConfig; -import com.tencent.supersonic.headless.api.pojo.TimeDefaultConfig; +import com.tencent.supersonic.headless.api.pojo.*; import com.tencent.supersonic.headless.api.pojo.enums.DimensionType; import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType; import com.tencent.supersonic.headless.api.pojo.request.DataSetReq; @@ -40,11 +26,7 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.core.annotation.Order; import org.springframework.stereotype.Component; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; +import java.util.*; @Component @Slf4j @@ -272,7 +254,6 @@ public class S2CompanyDemo extends S2BaseDemo { Map chatAppConfig = Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT)); chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId())); - chatAppConfig.get(DataInterpretProcessor.APP_KEY).setEnable(true); agent.setChatAppConfig(chatAppConfig); agentService.createAgent(agent, defaultUser); diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java index 7f7cb56b4..77bc0081b 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java @@ -146,7 +146,8 @@ public class S2VisitsDemo extends S2BaseDemo { agent.setStatus(1); agent.setEnableSearch(1); agent.setExamples(Lists.newArrayList("近15天超音数访问次数汇总", "按部门统计超音数的访问人数", "对比alice和lucy的停留时长", - "过去30天访问次数最高的部门top3", "近1个月总访问次数超过100次的部门有几个", "过去半个月每个核心用户的总停留时长")); + "过去30天访问次数最高的部门top3", "近1个月总访问次数超过100次的部门有几个", "过去半个月每个核心用户的总停留时长", + "今年以来访问次数最高的一天是哪一天")); // configure tools ToolConfig toolConfig = new ToolConfig(); diff --git a/launchers/standalone/src/main/resources/META-INF/spring.factories 
b/launchers/standalone/src/main/resources/META-INF/spring.factories index 0a989e182..0c35cf42d 100644 --- a/launchers/standalone/src/main/resources/META-INF/spring.factories +++ b/launchers/standalone/src/main/resources/META-INF/spring.factories @@ -26,9 +26,10 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ - com.tencent.supersonic.headless.core.translator.converter.SqlVariableParseConverter,\ - com.tencent.supersonic.headless.core.translator.converter.CalculateAggConverter,\ - com.tencent.supersonic.headless.core.translator.converter.ParserDefaultConverter + com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ + com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\ + com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\ + com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter com.tencent.supersonic.headless.core.translator.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.DetailQueryOptimizer diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java index a48825177..cab07b66c 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java @@ -12,7 +12,6 @@ import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum; import com.tencent.supersonic.common.service.ChatModelService; import com.tencent.supersonic.headless.api.pojo.SchemaElement; import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo; -import com.tencent.supersonic.headless.api.pojo.response.ParseResp; import com.tencent.supersonic.headless.api.pojo.response.QueryState; import com.tencent.supersonic.util.DataUtils; import org.springframework.beans.factory.annotation.Autowired; diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java index 1cf356a6f..966943e10 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java @@ -20,7 +20,7 @@ public class DetailTest extends BaseTest { @Test public void test_detail_dimension() throws Exception { - QueryResult actualResult = submitNewChat("周杰伦流派和代表作", DataUtils.tagAgentId); + QueryResult actualResult = submitNewChat("周杰伦流派和代表作", DataUtils.singerAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); @@ -31,7 +31,7 @@ public class DetailTest extends BaseTest { expectedParseInfo.setAggType(AggregateTypeEnum.NONE); QueryFilter dimensionFilter = - DataUtils.getFilter("singer_name", FilterOperatorEnum.EQUALS, "周杰伦", "歌手名", 8L); + DataUtils.getFilter("singer_name", FilterOperatorEnum.EQUALS, "周杰伦", "歌手名", 17L); expectedParseInfo.getDimensionFilters().add(dimensionFilter); expectedParseInfo.getDimensions() @@ -43,7 +43,7 @@ public class DetailTest extends BaseTest { @Test public void test_detail_filter() throws Exception { - QueryResult actualResult = submitNewChat("国风歌手", DataUtils.tagAgentId); + QueryResult actualResult = submitNewChat("国风歌手", DataUtils.singerAgentId); 
QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java index d0e251eed..2d809bb70 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java @@ -9,6 +9,7 @@ import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo; import com.tencent.supersonic.headless.api.pojo.request.QueryFilter; import com.tencent.supersonic.headless.chat.query.rule.metric.MetricFilterQuery; import com.tencent.supersonic.headless.chat.query.rule.metric.MetricGroupByQuery; +import com.tencent.supersonic.headless.chat.query.rule.metric.MetricModelQuery; import com.tencent.supersonic.headless.chat.query.rule.metric.MetricTopNQuery; import com.tencent.supersonic.util.DataUtils; import org.junit.jupiter.api.Order; @@ -28,13 +29,28 @@ import static com.tencent.supersonic.common.pojo.enums.AggregateTypeEnum.SUM; public class MetricTest extends BaseTest { @Test - public void testMetric() throws Exception { - QueryResult actualResult = submitNewChat("超音数 访问次数", DataUtils.metricAgentId); + public void testMetricModel() throws Exception { + QueryResult actualResult = submitNewChat("超音数 访问次数", DataUtils.productAgentId); + + QueryResult expectedResult = new QueryResult(); + SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); + expectedResult.setChatContext(expectedParseInfo); + + expectedResult.setQueryMode(MetricModelQuery.QUERY_MODE); + expectedParseInfo.setAggType(NONE); + expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); + + expectedParseInfo.setDateInfo( + DataUtils.getDateConf(DateConf.DateMode.BETWEEN, unit, period, startDay, endDay)); + expectedParseInfo.setQueryType(QueryType.AGGREGATE); + + assertQueryResult(expectedResult, actualResult); + assert actualResult.getQueryResults().size() == 1; } @Test public void testMetricFilter() throws Exception { - QueryResult actualResult = submitNewChat("alice的访问次数", DataUtils.metricAgentId); + QueryResult actualResult = submitNewChat("alice的访问次数", DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); @@ -57,7 +73,8 @@ public class MetricTest extends BaseTest { @Test public void testMetricGroupBy() throws Exception { - QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", DataUtils.metricAgentId); + System.setProperty("s2.test", "true"); + QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); @@ -79,7 +96,7 @@ public class MetricTest extends BaseTest { @Test public void testMetricFilterCompare() throws Exception { - QueryResult actualResult = submitNewChat("对比alice和lucy的访问次数", DataUtils.metricAgentId); + QueryResult actualResult = submitNewChat("对比alice和lucy的访问次数", DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); @@ -107,7 +124,7 @@ public class MetricTest extends BaseTest { @Test @Order(3) public void testMetricTopN() throws Exception { - QueryResult actualResult = submitNewChat("近3天访问次数最多的用户", DataUtils.metricAgentId); + QueryResult actualResult = submitNewChat("近3天访问次数最多的用户", 
DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); @@ -128,7 +145,7 @@ public class MetricTest extends BaseTest { @Test public void testMetricGroupBySum() throws Exception { - QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数总和", DataUtils.metricAgentId); + QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数总和", DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); expectedResult.setChatContext(expectedParseInfo); @@ -154,7 +171,7 @@ public class MetricTest extends BaseTest { String dateStr = textFormat.format(format.parse(startDay)); QueryResult actualResult = - submitNewChat(String.format("alice在%s的访问次数", dateStr), DataUtils.metricAgentId); + submitNewChat(String.format("alice在%s的访问次数", dateStr), DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java index b4e3dd653..7353dd526 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java @@ -20,6 +20,7 @@ public class QueryByMetricTest extends BaseTest { @Test public void testWithMetricAndDimensionBizNames() throws Exception { + System.setProperty("s2.test", "true"); QueryMetricReq queryMetricReq = new QueryMetricReq(); queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv")); queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department")); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java index 7e1b3a37d..43a50e590 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java @@ -46,6 +46,7 @@ public class QueryByStructTest extends BaseTest { @Test public void testDetailQuery() throws Exception { + System.setProperty("s2.test", "true"); QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("user_name", "department"), QueryType.DETAIL); SemanticQueryResp semanticQueryResp = @@ -86,6 +87,7 @@ public class QueryByStructTest extends BaseTest { @Test public void testFilterQuery() throws Exception { + System.setProperty("s2.test", "true"); QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("department")); List dimensionFilters = new ArrayList<>(); Filter filter = new Filter(); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java b/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java index 4e28f61a3..a882293c1 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java @@ -15,10 +15,10 @@ import static java.time.LocalDate.now; public class DataUtils { - public static final Integer metricAgentId = 1; - public static final Integer tagAgentId = 2; + public static final Integer productAgentId = 1; + public static final Integer companyAgentId = 2; + public static 
final Integer singerAgentId = 3; public static final Integer ONE_TURNS_CHAT_ID = 10; - public static final Integer MULTI_TURNS_CHAT_ID = 11; private static final User user_test = User.getDefaultUser(); public static User getUser() { @@ -40,7 +40,7 @@ public class DataUtils { public static ChatParseReq getChatParseReq(Integer id, String query, boolean enableLLM) { ChatParseReq chatParseReq = new ChatParseReq(); chatParseReq.setQueryText(query); - chatParseReq.setAgentId(metricAgentId); + chatParseReq.setAgentId(productAgentId); chatParseReq.setChatId(id); chatParseReq.setUser(user_test); chatParseReq.setDisableLLM(!enableLLM); diff --git a/launchers/standalone/src/test/resources/s2-config.yaml b/launchers/standalone/src/test/resources/s2-config.yaml index e570dfb12..a95a4df7e 100644 --- a/launchers/standalone/src/test/resources/s2-config.yaml +++ b/launchers/standalone/src/test/resources/s2-config.yaml @@ -21,7 +21,7 @@ s2: date: true demo: - names: S2VisitsDemo,S2SingerDemo + names: S2VisitsDemo,S2SingerDemo,S2CompanyDemo enableLLM: false authentication: From 91b16f95ff35cf51d2881892d3a58e794b8a4e57 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Mon, 25 Nov 2024 21:35:13 +0800 Subject: [PATCH 15/88] [improvement][headless]Add company demo schema and data to integration test. [improvement][headless]Clean code logic of headless translator. --- .../translator/DefaultSemanticTranslator.java | 24 +- .../calcite/CalciteQueryParser.java | 2 +- .../translator/calcite/sql/SqlBuilder.java | 2 +- .../calcite/HeadlessParserServiceTest.java | 2 +- .../src/test/resources/application-local.yaml | 4 +- .../src/test/resources/db/data-h2-demo.sql | 1083 ++++++++++++++++ .../src/test/resources/db/data-h2.sql | 1099 +---------------- .../src/test/resources/db/schema-h2-demo.sql | 85 ++ 8 files changed, 1193 insertions(+), 1108 deletions(-) create mode 100644 launchers/standalone/src/test/resources/db/data-h2-demo.sql create mode 100644 launchers/standalone/src/test/resources/db/schema-h2-demo.sql diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java index 79bfb478c..8474ce2fc 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java @@ -59,38 +59,42 @@ public class DefaultSemanticTranslator implements SemanticTranslator { SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); log.info("parse with ontology: [{}]", ontologyQueryParam); ComponentFactory.getQueryParser().parse(queryStatement); + String ontologyQueryTable = sqlQueryParam.getTable(); + String ontologyQuerySql = sqlQueryParam.getSql(); + String ontologySql = queryStatement.getSql(); if (!queryStatement.isOk()) { - throw new Exception(String.format("parse table [%s] error [%s]", - sqlQueryParam.getTable(), queryStatement.getErrMsg())); + throw new Exception(String.format("parse ontology table [%s] error [%s]", + ontologyQueryTable, queryStatement.getErrMsg())); } List> tables = new ArrayList<>(); - tables.add(Pair.of(sqlQueryParam.getTable(), queryStatement.getSql())); + + tables.add(Pair.of(ontologyQueryTable, ontologySql)); if (sqlQueryParam.isSupportWith()) { EngineType engineType = EngineType.fromString(queryStatement.getOntology().getDatabase().getType()); - if 
(!SqlMergeWithUtils.hasWith(engineType, sqlQueryParam.getSql())) { + if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) { String withSql = "with " + tables.stream() .map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight())) - .collect(Collectors.joining(",")) + "\n" + sqlQueryParam.getSql(); + .collect(Collectors.joining(",")) + "\n" + ontologyQuerySql; queryStatement.setSql(withSql); } else { List parentTableList = tables.stream().map(Pair::getLeft).collect(Collectors.toList()); List parentSqlList = tables.stream().map(Pair::getRight).collect(Collectors.toList()); - String mergeSql = SqlMergeWithUtils.mergeWith(engineType, sqlQueryParam.getSql(), + String mergeSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql, parentSqlList, parentTableList); queryStatement.setSql(mergeSql); } } else { - String dsSql = sqlQueryParam.getSql(); for (Pair tb : tables) { - dsSql = StringUtils.replace(dsSql, tb.getLeft(), "(" + tb.getRight() + ") " - + (sqlQueryParam.isWithAlias() ? "" : tb.getLeft()), -1); + ontologyQuerySql = + StringUtils.replace(ontologyQuerySql, tb.getLeft(), "(" + tb.getRight() + + ") " + (sqlQueryParam.isWithAlias() ? "" : tb.getLeft()), -1); } - queryStatement.setSql(dsSql); + queryStatement.setSql(ontologyQuerySql); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java index 8a787b92c..1deb240ee 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java @@ -28,7 +28,7 @@ public class CalciteQueryParser implements QueryParser { .enableOptimize(queryStatement.getEnableOptimize()).build()) .build(); SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema); - sqlBuilder.build(queryStatement); + sqlBuilder.buildOntologySql(queryStatement); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java index 6477245f9..e6db7eabe 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java @@ -35,7 +35,7 @@ public class SqlBuilder { this.schema = schema; } - public void build(QueryStatement queryStatement) throws Exception { + public void buildOntologySql(QueryStatement queryStatement) throws Exception { this.ontologyQueryParam = queryStatement.getOntologyQueryParam(); if (ontologyQueryParam.getMetrics() == null) { ontologyQueryParam.setMetrics(new ArrayList<>()); diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java index c28e3e1ed..38138d376 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java @@ -29,7 +29,7 @@ class HeadlessParserServiceTest { SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); QueryStatement queryStatement = new 
QueryStatement(); queryStatement.setOntologyQueryParam(ontologyQueryParam); - aggBuilder.build(queryStatement); + aggBuilder.buildOntologySql(queryStatement); EngineType engineType = EngineType.fromString(semanticSchema.getOntology().getDatabase().getType()); sqlParser.setSql(aggBuilder.getSql(engineType)); diff --git a/launchers/standalone/src/test/resources/application-local.yaml b/launchers/standalone/src/test/resources/application-local.yaml index 5f3e99e22..2ceb32c1e 100644 --- a/launchers/standalone/src/test/resources/application-local.yaml +++ b/launchers/standalone/src/test/resources/application-local.yaml @@ -6,8 +6,8 @@ spring: password: semantic sql: init: - schema-locations: classpath:db/schema-h2.sql - data-locations: classpath:db/data-h2.sql + schema-locations: classpath:db/schema-h2.sql,classpath:db/schema-h2-demo.sql + data-locations: classpath:db/data-h2.sql,classpath:db/data-h2-demo.sql h2: console: path: /h2-console/semantic diff --git a/launchers/standalone/src/test/resources/db/data-h2-demo.sql b/launchers/standalone/src/test/resources/db/data-h2-demo.sql new file mode 100644 index 000000000..6c0ab0091 --- /dev/null +++ b/launchers/standalone/src/test/resources/db/data-h2-demo.sql @@ -0,0 +1,1083 @@ +-------S2VisitsDemo +MERGE INTO s2_user_department (user_name, department) values ('jack','HR'); +MERGE INTO s2_user_department (user_name, department) values ('tom','sales'); +MERGE INTO s2_user_department (user_name, department) values ('lucy','marketing'); +MERGE INTO s2_user_department (user_name, department) values ('john','strategy'); +MERGE INTO s2_user_department (user_name, department) values ('alice','sales'); +MERGE INTO s2_user_department (user_name, department) values ('dean','marketing'); + +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, 
page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 
'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis 
(imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', 
-1, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO 
s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES 
(DATEADD('DAY', -7, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'jack', 'p1'); 
+INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, 
page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 
'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, 
user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, 
CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO 
s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) 
VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p3'); 
+INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, 
page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, 
CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'alice', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p2'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', 'p1'); +INSERT INTO 
s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p1'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p3'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p4'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p5'); +INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', 'p4'); + +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.7636857512911863', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', '0.17663327393462436', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.38943688941552057', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', '0.2715819955225307', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.9358210273119568', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.9364586435510802', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.9707723036513162', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', '0.8497763866782723', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.15504417761372413', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.9507563118298399', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.9746364180572994', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', '0.12869214941133378', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.3024970533288409', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.6639702099980812', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'lucy', '0.4929901454858626', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.06853040276026445', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.8488086078299616', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, 
CURRENT_DATE()), 'lucy', '0.8589111177125592', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', '0.5576357066482228', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.8047888670006846', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.766944548494366', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.5280072184505449', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.9693343356046343', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', '0.12805203958456424', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', '0.16963603387027637', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.5901202956521101', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.12710364646712236', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.6346530909156196', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', '0.12461289103639872', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', '0.9863947334662437', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', '0.48899961064192987', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'alice', '0.5382796792688207', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', '0.3506568687014143', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.8633072449771709', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.13999135315363687', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', '0.07258740493845894', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', '0.5244413940436958', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.13258670732966138', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.6015982054464575', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', '0.05513158944480323', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES 
(DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.6707121735296985', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', '0.9330440339006469', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', '0.5630674323371607', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', '0.8720647566229917', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.8331899070546519', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', '0.6712876436249856', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', '0.6694409980332703', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', '0.3703307480606334', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.775368688472696', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.9151205443267096', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.09543108823305857', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', '0.7893992120771057', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', '0.5119923080070498', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', '0.49906724167974936', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.046258282700961884', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'dean', '0.44843595680103954', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', '0.7743935471689718', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.5855299615656824', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', '0.9412963512379853', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.8383247587082538', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', '0.14517876867236124', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.9327229861441061', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.19042326582894153', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, 
page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.6029067818254513', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.21715964747214422', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', '0.34259842721045974', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.7064419016593382', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', '0.5725636566517865', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', '0.22332539583809208', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.8049036189055911', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', '0.6029674758974956', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.11884976360561716', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', '0.7124916829130662', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.5893693718556829', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.602073304496253', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.10491061160039927', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', '0.9006548872378379', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.8545144244288455', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', '0.16915384987875726', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.2271640700690446', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', '0.7807518577160636', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.8919859648888653', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', '0.1564450687270359', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.5840549187653847', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', '0.2213255596777869', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', '0.07868261880306426', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, 
user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.07710010861455818', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.5131249730162654', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.5035035055368601', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.8996978291173905', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.057442290722216294', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.6443079066865616', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', '0.7398098480748726', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', '0.9835694815034591', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', '0.9879213445635557', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.4020136688147111', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', '0.6698797170128024', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.17325132416789113', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', '0.5784229486763606', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.9185978183932058', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.5474783153973963', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.9730731954700215', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', '0.5390873359288765', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', '0.20522241320887713', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', '0.4088233242325021', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.7608047695853417', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.2749731221085713', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.06154055374702494', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.460668002022406', 'p5'); +INSERT INTO s2_stay_time_statis 
(imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.4474746325306228', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.5761666885467472', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'dean', '0.33233441360339655', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', '0.7426534909874778', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', '0.5841437875889118', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.2818296500094526', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', '0.8670888843915217', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', '0.5249294365740248', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.5483356748008438', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', '0.7278566847412673', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.6779976902157362', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.09995341651736978', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', '0.4528538159233879', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.5870756885301056', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.9842091927290255', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.04580936015706816', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.8814678270145769', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', '0.06517379256096412', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.8769832364187129', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', '0.584562279025023', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.8102404090621375', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.11481653429176686', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.43422888918962554', 'p4'); +INSERT INTO 
s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'lucy', '0.0684414272594508', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.976546463969412', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.617906858141431', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'jack', '0.08663740247579998', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', '0.7124944606691416', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'alice', '0.1321700521239627', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'jack', '0.3078946609431664', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.6149442855237194', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'alice', '0.5963801306980994', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', '0.6999542038973406', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.4599112653446624', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', '0.20300901401048832', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.39989705958717037', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.2486378364940327', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', '0.16880398079144077', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.73927288385526', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.8645283506689198', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', '0.3266940826759587', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.9195490073037541', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.9452523036658287', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.21269683438120535', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.7377502855387184', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.38981597634408716', 'p2'); 
+INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.7001799391999863', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.6616720024008785', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'dean', '0.497721735058096', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'jack', '0.22255613760959603', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.05247640233319417', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', '0.27237572107833363', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', '0.9529452406380252', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.28243045060463157', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', '0.17880444250082506', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.035050038002381156', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', '0.840803223728221', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.5318457377361356', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.9280332892460665', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', '0.752354382202208', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', '0.1866528331789219', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', '0.7016165545791373', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.4191547989960899', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', '0.7025516699007639', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.6160127317884274', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', '0.91223094958137', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.4383056089013998', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', '0.595750781166582', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'lucy', 
'0.9472349338730268', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', '0.0519104588842193', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.48043983034526205', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.14754707786497478', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', '0.36124288370035695', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', '0.21777919493494613', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.22637666702475057', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.9378215576942598', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.3309229261144562', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.7602880453727515', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.9470462487873785', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.6770215935547629', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.1586074803669385', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'lucy', '0.2754855564794071', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', '0.8355347738454384', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.7251813505573811', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'jack', '0.006606625589642534', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', '0.304832277753024', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.026368662837989554', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', '0.6855977520602776', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'tom', '0.8193746826441749', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.021179295102459972', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.1533849522536005', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, 
CURRENT_DATE()), 'alice', '0.18893553542301778', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.39870999343833624', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.9985665103520182', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.6961441157700171', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.9861933923851885', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', '0.993076500099477', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.4320547269058953', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', '0.18441071030375877', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.1501504986117118', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.252021845734527', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', '0.24442701577183745', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.07563738855797564', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', '0.34247820646440985', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.9456979276862031', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.19494357263973816', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.9371493867882469', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.6136241316589367', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', '0.8922330760877784', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', '0.9001986074661864', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.4889702884422866', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.2689551234431401', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.5223573993758465', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', '0.05042295556527243', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES 
(DATEADD('DAY', -2, CURRENT_DATE()), 'tom', '0.2717147121880483', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', '0.7397093309370814', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', '0.157064341631733', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', '0.7213399784998017', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', '0.764081440588005', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.7514070600074144', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.611647412825278', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.6600796877195596', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', '0.8942204153751679', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.07398121085929721', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', '0.1652506990439564', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.5849759516111703', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.1672502732600889', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.7836135556233219', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', '0.26181269644936356', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', '0.6577275876355586', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.3067293364197956', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.8608288543866495', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.814283434116926', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'jack', '0.33993584425872936', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'john', '0.010812798859160089', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.5156558224263926', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'jack', '0.46320035330198406', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) 
VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', '0.2651020283994786', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.42467241545664147', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.3695905136678498', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', '0.15269122123348644', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.6755688670583248', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'jack', '0.39064306179528907', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', '0.36479296691952023', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'lucy', '0.5069249157662691', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.4785315495532231', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', '0.7582526218052175', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.42064109605717914', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', '0.5587757581237022', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', '0.3561686564964428', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.7101688305173135', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.6518061375522985', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.7564485884156583', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.36531347293134464', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', '0.5201689359070235', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', '0.7138792929290383', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.9751003716333827', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.5281906318027629', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.6291356541485003', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', '0.1938712974807698', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, 
page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', '0.6267850210775459', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.4469970592043767', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', '0.7690659124175409', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'jack', '0.13335067838090386', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'jack', '0.2966621725922035', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.5740481445089863', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.838028890036331', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', '0.8094354537628714', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', '0.5552924586108698', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.49150373927678315', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.7264346889377966', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.9292830287297702', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.3905616258240767', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.15912349648571666', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', '0.6030082006630102', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', '0.8712354035243679', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.7685306377211826', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.2869913942171415', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.7142615166855639', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.5625978475154423', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.13611601734791123', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.6977333962685311', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', '0.35140477709778295', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, 
user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.8805119222967716', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.7014124236538637', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', '0.12759538003439375', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.7515403792213445', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', '0.03700239289885987', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', '0.31674618364630946', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.4491378834800146', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', '0.6742764131652571', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'lucy', '0.5286362221140248', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.007890326473113496', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', '0.8046560540950831', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.7198364371127147', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.7400546712169153', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.16859870460868698', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.8462852684569557', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.010211452005474353', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', '0.8617802368201087', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.21667479046797633', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.8667689615468714', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', '0.16140709875863557', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.16713368182304666', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'lucy', '0.8957484629768053', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'tom', '0.457835758220534', 'p3'); +INSERT INTO s2_stay_time_statis 
(imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.9435170960198477', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', '0.9699253608913104', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.2309897429566834', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.7879705066452681', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.20795869239817255', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', '0.4110352469382019', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', '0.4979592772533561', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', '0.18810865430947044', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'tom', '0.5001240246982048', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'jack', '0.08341934160029707', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '0.04812784841651041', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.4655982693269717', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', '0.8539357978460663', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.9649541785823592', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', '0.8243635648047365', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.929949719929735', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.055983276861168996', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.07845430274829746', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.28257674222099116', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.1578419214960578', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', '0.7853118484860825', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', '0.20790127125904156', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.8650538395535204', 'p4'); +INSERT INTO 
s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.902116091225815', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', '0.48542770770171373', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.16725337150113984', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'lucy', '0.3157444453259486', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.565727220131555', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', '0.2531688065358064', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', '0.9191434620980499', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.9224628853942058', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', '0.3256288410730337', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.9709152566761661', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.9794173893522709', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.16582064407977237', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', '0.2652519246960059', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'alice', '0.04092489871261762', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.3020444893927522', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.4655412764350543', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', '0.9226436424888846', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.4707663393012884', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.3277970119243966', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', '0.4730675479071551', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', '0.10261940477901954', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', '0.4148892373198616', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.2877219827348403', 'p3'); 
+INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.16212409974675845', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.9567425121214822', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'lucy', '0.19795350030679149', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.6954199597749198', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', '0.32884293488801164', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', '0.4789917995407148', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', '0.0698927593996298', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.3352267723792438', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', '0.8085116661598726', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.17515060210353794', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.6006963088370202', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.8794167536704468', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.04091469320757368', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', '0.6709116812690366', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', '0.4850646101328463', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.547488212623346', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', '0.6301717145008927', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', '0.06123370093612068', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.2545600223228257', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', '0.28355287519210803', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.3231348374147818', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.4585172495754063', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', 
'0.7893945285152268', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.6810596014794181', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.7136031244915907', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'jack', '0.259734039051829', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.7759518703827996', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.06288891046833589', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', '0.8242980461154241', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.36590300307021595', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.20254092528445444', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.5427356081880325', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.1467846603517391', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.8975527268892767', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', '0.3483541520806722', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.6922544855316723', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.3690185253006011', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.7564541265683148', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', '0.3634152133342695', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.33740378933701987', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.7942640738315301', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.7894896778233523', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.7153281477198108', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'tom', '0.5546359859065261', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', '0.7727157385809087', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 
'dean', '0.8707097754747494', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.3873936520764878', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.7590305068820566', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.512826935863365', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', '0.19120284727846926', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', '0.5382693105670825', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.826241649014955', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'lucy', '0.6133080470571559', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.6452862617544055', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'lucy', '0.3025772179023586', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '4.709864550322962E-4', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.024816355013726588', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', '0.8407500495605565', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.8420879584266481', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', '0.2719224735814776', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.8939712577294938', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', '0.8086189323362379', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', '0.6063415085381448', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.39783242658234674', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.6085577206028068', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.5154289424127074', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', '0.878436600887031', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.5577906295015223', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 
'lucy', '0.1143260282925247', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', '0.312756557275364', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.05548807854726956', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', '0.12140791431139175', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.23897628700410234', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '0.22223137342481392', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.12379891645900953', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', '0.33729146112854247', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.8816768640060831', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', '0.6301700633426532', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', '0.4566295223861714', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', '0.1777378523933678', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.8163769471165477', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.4380805149704541', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', '0.2987018822475964', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.6726495645391617', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.8394327461109705', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'dean', '0.820512945501936', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.1580105370757261', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.9961450897279505', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.6574891890500061', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.5201205570085158', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', '0.2445069633928285', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, 
CURRENT_DATE()), 'john', '0.3155229654901067', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.3665971881269575', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.5544977915912215', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.15978771803015113', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.038128748344929186', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.49026304025118594', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.5166802080526571', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.22568230066042194', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.9888634109849955', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', '0.21022365182102054', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'john', '0.47052993358031114', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.25686122383263454', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.18929054223320718', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.7925339862375451', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', '0.12613308249498645', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.7381524971311578', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.08639585437319919', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.9519897106846164', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.33446548574801926', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.40667134603483324', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.17100718420628735', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.4445585525686886', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.47372916928883013', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES 
(DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.19826861093848824', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.13679268112019338', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.9805515708224516', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'dean', '0.4738376165601095', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.5739441073158964', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'alice', '0.8428505498030564', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', '0.32655416551155336', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.7055736367780644', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', '0.9621355090189875', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.9665339161730553', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.44309781869697995', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', '0.8651220802537761', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '0.6451892308277741', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.056797307451316725', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.6847604118085596', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.13428051757364667', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '0.9814797176951834', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', '0.7386074051153445', 'p3'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.4825297824657663', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', '0.06608870508231235', 'p5'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'lucy', '0.6278253028988848', 'p4'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'alice', '0.6705580511822682', 'p1'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', '0.8131712486302015', 'p2'); +INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, 
page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.8124302447925607', 'p4');
+INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', '0.039935860913407284', 'p2');
+
+
+-------S2ArtistDemo
+MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
+MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
+MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
+MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
+MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('程响', '内地','人间烟火','国风',1000000,1000000,1000000);
+MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
+
+MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国');
+MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国');
+MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚');
+MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国');
+MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
+MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
+
+MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Shrikanta','印度','男性','tagore');
+MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Prity','孟加拉国','女性','nazrul');
+MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Farida','孟加拉国','女性','民间');
+MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Topu','印度','女性','现代');
+MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Enrique','美国','男性','蓝调');
+MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Michel','英国','男性','流行');
+
+
+-------S2CompanyDemo
+MERGE INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000);
+MERGE INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473);
+MERGE INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503);
+MERGE INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000);
+MERGE INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000);
+
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000);
+MERGE INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000);
+
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_133',12300000000, 2300000000,30,30);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_134',12400000000, 2400000000,10,10);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_135',12500000000, 2500000000,30,30);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_136',12600000000, 2600000000,40,40);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_137',12700000000, 2700000000,50,50);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_138',12800000000, 2800000000,20,10);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_139',12900000000, 2900000000,60,70);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_140',13000000000, 3000000000,80,100);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_131',13100000000,3100000000, 10,10);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_132',13200000000, 3200000000,20,20);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_133',13300000000, 3300000000,30,30);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_134',13400000000, 3400000000,10,10);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_135',13500000000, 3500000000,30,30);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_136',13600000000, 3600000000,40,40);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_137',13700000000, 3700000000,50,50);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_138',13800000000, 3800000000,20,10);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_139',13900000000, 3900000000,60,70);
+INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_140',14000000000, 4000000000,80,100);
diff --git a/launchers/standalone/src/test/resources/db/data-h2.sql b/launchers/standalone/src/test/resources/db/data-h2.sql
index 75b45404b..fd364a8c4 100644
--- a/launchers/standalone/src/test/resources/db/data-h2.sql
+++ b/launchers/standalone/src/test/resources/db/data-h2.sql
@@ -1,4 +1,9 @@
 -- sample user
+MERGE INTO s2_user (id, `name`, password, salt, display_name, email, is_admin) values (1, 'admin','c3VwZXJzb25pY0BiaWNvbTD12g9wGXESwL7+o7xUW90=','jGl25bVBBBW96Qi9Te4V3w==','admin','admin@xx.com', 1);
+MERGE INTO s2_user (id, `name`, password, display_name, email) values (2, 'jack','123456','jack','jack@xx.com');
+MERGE INTO s2_user (id, `name`, password, display_name, email) values (3, 'tom','123456','tom','tom@xx.com');
+MERGE INTO s2_user (id, `name`, password, display_name, email, is_admin) values (4, 'lucy','123456','lucy','lucy@xx.com', 1);
+MERGE INTO s2_user (id, `name`, password, display_name, email) values (5, 'alice','123456','alice','alice@xx.com');
 ---The default value for the password is 123456
 MERGE INTO s2_user (id, `name`, password, salt, display_name, email, is_admin) values (1, 'admin','c3VwZXJzb25pY0BiaWNvbdktJJYWw6A3rEmBUPzbn/6DNeYnD+y3mAwDKEMS3KVT','jGl25bVBBBW96Qi9Te4V3w==','admin','admin@xx.com', 1);
 MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (2, 'jack','c3VwZXJzb25pY0BiaWNvbWxGalmwa0h/trkh/3CWOYMDiku0Op1VmOfESIKmN0HG','MWERWefm/3hD6kYndF6JIg==','jack','jack@xx.com');
@@ -14,1096 +19,4 @@ MERGE INTO s2_available_date_info(`id`,`item_id` ,`type`
,`date_format` ,`sta values (3 , 3, 'dimension', 'yyyy-MM-dd', DATEADD('DAY', -28, CURRENT_DATE()), DATEADD('DAY', -1, CURRENT_DATE()), '[]', '2023-06-01', 'admin', '2023-06-01', 'admin'); MERGE INTO s2_canvas(`id`, `domain_id`, `type`, `config` ,`created_at` ,`created_by` ,`updated_at` ,`updated_by` ) -values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin'); - --- sample data -MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); -MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000); -MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000); -MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('张碧晨', '内地','光的方向','流行',1000000,1000000,1000000); -MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('程响', '内地','人间烟火','国风',1000000,1000000,1000000); -MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000); - ----demo data for semantic and chat -MERGE INTO s2_user_department (user_name, department) values ('jack','HR'); - -MERGE INTO s2_user_department (user_name, department) values ('jack','HR'); -MERGE INTO s2_user_department (user_name, department) values ('tom','sales'); -MERGE INTO s2_user_department (user_name, department) values ('lucy','marketing'); -MERGE INTO s2_user_department (user_name, department) values ('john','strategy'); -MERGE INTO s2_user_department (user_name, department) values ('alice','sales'); -MERGE INTO s2_user_department (user_name, department) values ('dean','marketing'); - -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES 
(CURRENT_DATE(), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, 
CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO 
s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) 
VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 
'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, 
user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, 
CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO 
s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES 
(DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', 'p3'); -INSERT 
INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) 
VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', 
'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, 
user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, 
CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO 
s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) 
VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'alice', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', 
'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'jack', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', 'p2'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', 'p1'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', 'p3'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', 'p4'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', 'p5'); -INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', 'p4'); - - - - - -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.7636857512911863', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', '0.17663327393462436', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.38943688941552057', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', '0.2715819955225307', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.9358210273119568', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.9364586435510802', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.9707723036513162', 'p5'); 
-INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', '0.8497763866782723', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.15504417761372413', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.9507563118298399', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.9746364180572994', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', '0.12869214941133378', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.3024970533288409', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.6639702099980812', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'lucy', '0.4929901454858626', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.06853040276026445', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.8488086078299616', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'lucy', '0.8589111177125592', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', '0.5576357066482228', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.8047888670006846', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.766944548494366', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.5280072184505449', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.9693343356046343', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', '0.12805203958456424', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', '0.16963603387027637', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.5901202956521101', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.12710364646712236', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.6346530909156196', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', '0.12461289103639872', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', '0.9863947334662437', 
'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', '0.48899961064192987', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'alice', '0.5382796792688207', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'dean', '0.3506568687014143', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.8633072449771709', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.13999135315363687', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', '0.07258740493845894', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', '0.5244413940436958', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.13258670732966138', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.6015982054464575', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', '0.05513158944480323', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'alice', '0.6707121735296985', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', '0.9330440339006469', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', '0.5630674323371607', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', '0.8720647566229917', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.8331899070546519', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', '0.6712876436249856', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', '0.6694409980332703', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', '0.3703307480606334', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.775368688472696', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.9151205443267096', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.09543108823305857', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'dean', '0.7893992120771057', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', 
'0.5119923080070498', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', '0.49906724167974936', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.046258282700961884', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'dean', '0.44843595680103954', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', '0.7743935471689718', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.5855299615656824', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', '0.9412963512379853', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.8383247587082538', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'lucy', '0.14517876867236124', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.9327229861441061', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.19042326582894153', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.6029067818254513', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.21715964747214422', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', '0.34259842721045974', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.7064419016593382', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', '0.5725636566517865', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', '0.22332539583809208', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.8049036189055911', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', '0.6029674758974956', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.11884976360561716', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'alice', '0.7124916829130662', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.5893693718556829', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.602073304496253', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, 
CURRENT_DATE()), 'tom', '0.10491061160039927', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', '0.9006548872378379', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.8545144244288455', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', '0.16915384987875726', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.2271640700690446', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', '0.7807518577160636', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.8919859648888653', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', '0.1564450687270359', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.5840549187653847', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', '0.2213255596777869', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', '0.07868261880306426', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.07710010861455818', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.5131249730162654', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.5035035055368601', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.8996978291173905', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.057442290722216294', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.6443079066865616', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', '0.7398098480748726', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'dean', '0.9835694815034591', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'john', '0.9879213445635557', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.4020136688147111', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'lucy', '0.6698797170128024', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.17325132416789113', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES 
(DATEADD('DAY', -12, CURRENT_DATE()), 'lucy', '0.5784229486763606', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.9185978183932058', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.5474783153973963', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.9730731954700215', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', '0.5390873359288765', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'alice', '0.20522241320887713', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', '0.4088233242325021', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.7608047695853417', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.2749731221085713', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.06154055374702494', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.460668002022406', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.4474746325306228', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.5761666885467472', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'dean', '0.33233441360339655', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', '0.7426534909874778', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', '0.5841437875889118', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.2818296500094526', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', '0.8670888843915217', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'alice', '0.5249294365740248', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.5483356748008438', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', '0.7278566847412673', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.6779976902157362', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.09995341651736978', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, 
page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', '0.4528538159233879', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.5870756885301056', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.9842091927290255', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.04580936015706816', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.8814678270145769', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', '0.06517379256096412', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.8769832364187129', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', '0.584562279025023', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.8102404090621375', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.11481653429176686', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.43422888918962554', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'lucy', '0.0684414272594508', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.976546463969412', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.617906858141431', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'jack', '0.08663740247579998', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', '0.7124944606691416', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'alice', '0.1321700521239627', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'jack', '0.3078946609431664', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.6149442855237194', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'alice', '0.5963801306980994', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', '0.6999542038973406', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.4599112653446624', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', '0.20300901401048832', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, 
user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.39989705958717037', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.2486378364940327', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', '0.16880398079144077', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.73927288385526', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.8645283506689198', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', '0.3266940826759587', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.9195490073037541', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.9452523036658287', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.21269683438120535', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.7377502855387184', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.38981597634408716', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.7001799391999863', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.6616720024008785', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'dean', '0.497721735058096', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'jack', '0.22255613760959603', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.05247640233319417', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'dean', '0.27237572107833363', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'alice', '0.9529452406380252', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.28243045060463157', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'lucy', '0.17880444250082506', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.035050038002381156', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', '0.840803223728221', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.5318457377361356', 'p4'); -INSERT INTO 
s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.9280332892460665', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', '0.752354382202208', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'dean', '0.1866528331789219', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', '0.7016165545791373', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.4191547989960899', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', '0.7025516699007639', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.6160127317884274', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'alice', '0.91223094958137', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.4383056089013998', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'jack', '0.595750781166582', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'lucy', '0.9472349338730268', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'jack', '0.0519104588842193', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.48043983034526205', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.14754707786497478', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', '0.36124288370035695', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'dean', '0.21777919493494613', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.22637666702475057', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.9378215576942598', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.3309229261144562', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.7602880453727515', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.9470462487873785', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.6770215935547629', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.1586074803669385', 'p5'); 
-INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'lucy', '0.2754855564794071', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', '0.8355347738454384', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.7251813505573811', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'jack', '0.006606625589642534', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', '0.304832277753024', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.026368662837989554', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'tom', '0.6855977520602776', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'tom', '0.8193746826441749', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.021179295102459972', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.1533849522536005', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.18893553542301778', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.39870999343833624', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.9985665103520182', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.6961441157700171', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.9861933923851885', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', '0.993076500099477', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.4320547269058953', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', '0.18441071030375877', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.1501504986117118', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.252021845734527', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'lucy', '0.24442701577183745', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.07563738855797564', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', 
'0.34247820646440985', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.9456979276862031', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.19494357263973816', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.9371493867882469', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.6136241316589367', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', '0.8922330760877784', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'dean', '0.9001986074661864', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.4889702884422866', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.2689551234431401', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.5223573993758465', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'tom', '0.05042295556527243', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', '0.2717147121880483', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', '0.7397093309370814', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', '0.157064341631733', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'lucy', '0.7213399784998017', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'tom', '0.764081440588005', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.7514070600074144', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.611647412825278', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.6600796877195596', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', '0.8942204153751679', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.07398121085929721', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', '0.1652506990439564', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.5849759516111703', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', 
'0.1672502732600889', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.7836135556233219', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'dean', '0.26181269644936356', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', '0.6577275876355586', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.3067293364197956', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.8608288543866495', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.814283434116926', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'jack', '0.33993584425872936', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'john', '0.010812798859160089', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.5156558224263926', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'jack', '0.46320035330198406', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', '0.2651020283994786', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.42467241545664147', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.3695905136678498', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'tom', '0.15269122123348644', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.6755688670583248', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'jack', '0.39064306179528907', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', '0.36479296691952023', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'lucy', '0.5069249157662691', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.4785315495532231', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', '0.7582526218052175', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.42064109605717914', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'dean', '0.5587757581237022', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, 
CURRENT_DATE()), 'lucy', '0.3561686564964428', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.7101688305173135', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.6518061375522985', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.7564485884156583', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.36531347293134464', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'jack', '0.5201689359070235', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'john', '0.7138792929290383', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.9751003716333827', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.5281906318027629', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.6291356541485003', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', '0.1938712974807698', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'john', '0.6267850210775459', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.4469970592043767', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', '0.7690659124175409', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'jack', '0.13335067838090386', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'jack', '0.2966621725922035', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.5740481445089863', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'alice', '0.838028890036331', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', '0.8094354537628714', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'alice', '0.5552924586108698', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.49150373927678315', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.7264346889377966', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.9292830287297702', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', 
-16, CURRENT_DATE()), 'dean', '0.3905616258240767', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.15912349648571666', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'alice', '0.6030082006630102', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'lucy', '0.8712354035243679', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.7685306377211826', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.2869913942171415', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.7142615166855639', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.5625978475154423', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.13611601734791123', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'alice', '0.6977333962685311', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', '0.35140477709778295', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.8805119222967716', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.7014124236538637', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', '0.12759538003439375', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.7515403792213445', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'lucy', '0.03700239289885987', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', '0.31674618364630946', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.4491378834800146', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', '0.6742764131652571', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'lucy', '0.5286362221140248', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.007890326473113496', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', '0.8046560540950831', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.7198364371127147', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES 
(DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.7400546712169153', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.16859870460868698', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.8462852684569557', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.010211452005474353', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'alice', '0.8617802368201087', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.21667479046797633', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.8667689615468714', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', '0.16140709875863557', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.16713368182304666', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'lucy', '0.8957484629768053', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'tom', '0.457835758220534', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.9435170960198477', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'jack', '0.9699253608913104', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.2309897429566834', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.7879705066452681', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.20795869239817255', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', '0.4110352469382019', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'jack', '0.4979592772533561', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', '0.18810865430947044', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'tom', '0.5001240246982048', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'jack', '0.08341934160029707', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '0.04812784841651041', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.4655982693269717', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, 
page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'dean', '0.8539357978460663', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'john', '0.9649541785823592', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', '0.8243635648047365', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.929949719929735', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.055983276861168996', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'tom', '0.07845430274829746', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'alice', '0.28257674222099116', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.1578419214960578', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', '0.7853118484860825', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', '0.20790127125904156', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.8650538395535204', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.902116091225815', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'lucy', '0.48542770770171373', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.16725337150113984', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'lucy', '0.3157444453259486', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.565727220131555', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', '0.2531688065358064', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', '0.9191434620980499', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.9224628853942058', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'jack', '0.3256288410730337', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', '0.9709152566761661', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.9794173893522709', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'alice', '0.16582064407977237', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, 
stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', '0.2652519246960059', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'alice', '0.04092489871261762', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.3020444893927522', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'john', '0.4655412764350543', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'dean', '0.9226436424888846', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.4707663393012884', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.3277970119243966', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'tom', '0.4730675479071551', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'jack', '0.10261940477901954', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'alice', '0.4148892373198616', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.2877219827348403', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.16212409974675845', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.9567425121214822', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'lucy', '0.19795350030679149', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.6954199597749198', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'alice', '0.32884293488801164', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'john', '0.4789917995407148', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'lucy', '0.0698927593996298', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.3352267723792438', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', '0.8085116661598726', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.17515060210353794', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.6006963088370202', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.8794167536704468', 'p5'); -INSERT INTO s2_stay_time_statis 
(imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.04091469320757368', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'tom', '0.6709116812690366', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', '0.4850646101328463', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'tom', '0.547488212623346', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'dean', '0.6301717145008927', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'lucy', '0.06123370093612068', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'alice', '0.2545600223228257', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'john', '0.28355287519210803', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.3231348374147818', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.4585172495754063', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.7893945285152268', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'john', '0.6810596014794181', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'john', '0.7136031244915907', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'jack', '0.259734039051829', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.7759518703827996', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'john', '0.06288891046833589', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'dean', '0.8242980461154241', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.36590300307021595', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'lucy', '0.20254092528445444', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.5427356081880325', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.1467846603517391', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.8975527268892767', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'dean', '0.3483541520806722', 'p3'); -INSERT INTO 
s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.6922544855316723', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.3690185253006011', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'tom', '0.7564541265683148', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', '0.3634152133342695', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.33740378933701987', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.7942640738315301', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.7894896778233523', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'jack', '0.7153281477198108', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'tom', '0.5546359859065261', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'john', '0.7727157385809087', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'dean', '0.8707097754747494', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'john', '0.3873936520764878', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.7590305068820566', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'john', '0.512826935863365', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'john', '0.19120284727846926', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'dean', '0.5382693105670825', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'john', '0.826241649014955', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'lucy', '0.6133080470571559', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'jack', '0.6452862617544055', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'lucy', '0.3025772179023586', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '4.709864550322962E-4', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.024816355013726588', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'alice', '0.8407500495605565', 'p1'); -INSERT 
INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'alice', '0.8420879584266481', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'lucy', '0.2719224735814776', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'tom', '0.8939712577294938', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'dean', '0.8086189323362379', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'tom', '0.6063415085381448', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'tom', '0.39783242658234674', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.6085577206028068', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'tom', '0.5154289424127074', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'john', '0.878436600887031', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.5577906295015223', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'lucy', '0.1143260282925247', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', '0.312756557275364', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.05548807854726956', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'tom', '0.12140791431139175', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.23897628700410234', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '0.22223137342481392', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.12379891645900953', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'john', '0.33729146112854247', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.8816768640060831', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -21, CURRENT_DATE()), 'jack', '0.6301700633426532', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'alice', '0.4566295223861714', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', '0.1777378523933678', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.8163769471165477', 'p1'); 
-INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'tom', '0.4380805149704541', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', '0.2987018822475964', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'dean', '0.6726495645391617', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.8394327461109705', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'dean', '0.820512945501936', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'tom', '0.1580105370757261', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -20, CURRENT_DATE()), 'jack', '0.9961450897279505', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'john', '0.6574891890500061', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'john', '0.5201205570085158', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'alice', '0.2445069633928285', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -17, CURRENT_DATE()), 'john', '0.3155229654901067', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'jack', '0.3665971881269575', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'john', '0.5544977915912215', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.15978771803015113', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'lucy', '0.038128748344929186', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'tom', '0.49026304025118594', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.5166802080526571', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.22568230066042194', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -28, CURRENT_DATE()), 'john', '0.9888634109849955', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'jack', '0.21022365182102054', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'john', '0.47052993358031114', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.25686122383263454', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', 
'0.18929054223320718', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'jack', '0.7925339862375451', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -12, CURRENT_DATE()), 'john', '0.12613308249498645', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.7381524971311578', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'alice', '0.08639585437319919', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -27, CURRENT_DATE()), 'tom', '0.9519897106846164', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.33446548574801926', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'jack', '0.40667134603483324', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -10, CURRENT_DATE()), 'jack', '0.17100718420628735', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -26, CURRENT_DATE()), 'lucy', '0.4445585525686886', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'tom', '0.47372916928883013', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'john', '0.19826861093848824', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -13, CURRENT_DATE()), 'john', '0.13679268112019338', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -24, CURRENT_DATE()), 'tom', '0.9805515708224516', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'dean', '0.4738376165601095', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'dean', '0.5739441073158964', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'alice', '0.8428505498030564', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'lucy', '0.32655416551155336', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -14, CURRENT_DATE()), 'tom', '0.7055736367780644', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'tom', '0.9621355090189875', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -9, CURRENT_DATE()), 'jack', '0.9665339161730553', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'dean', '0.44309781869697995', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -18, CURRENT_DATE()), 'tom', '0.8651220802537761', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, 
CURRENT_DATE()), 'lucy', '0.6451892308277741', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -16, CURRENT_DATE()), 'dean', '0.056797307451316725', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.6847604118085596', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -23, CURRENT_DATE()), 'jack', '0.13428051757364667', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -29, CURRENT_DATE()), 'lucy', '0.9814797176951834', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -11, CURRENT_DATE()), 'tom', '0.7386074051153445', 'p3'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -25, CURRENT_DATE()), 'alice', '0.4825297824657663', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', '0.06608870508231235', 'p5'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -22, CURRENT_DATE()), 'lucy', '0.6278253028988848', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'alice', '0.6705580511822682', 'p1'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -19, CURRENT_DATE()), 'alice', '0.8131712486302015', 'p2'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -15, CURRENT_DATE()), 'lucy', '0.8124302447925607', 'p4'); -INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATEADD('DAY', -8, CURRENT_DATE()), 'lucy', '0.039935860913407284', 'p2'); - - - -MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国'); -MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国'); -MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚'); -MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国'); -MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大'); -MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国'); - -MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Shrikanta','印度','男性','tagore'); -MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Prity','孟加拉国','女性','nazrul'); -MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Farida','孟加拉国','女性','民间'); -MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Topu','印度','女性','现代'); -MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Enrique','美国','男性','蓝调'); -MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Michel','英国','男性','流行'); - -MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (1,'Shrikanta','3.78 MB','3:45','mp4'); -MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (2,'Prity','4.12 MB','2:56','mp3'); -MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (3,'Farida','3.69 MB','4:12','mp4'); -MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (4,'Enrique','4.58 MB','5:23','mp4'); -MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (5,'Michel','5.10 MB','4:34','mp3'); -MERGE INTO files(f_id,artist_name,file_size,duration,formats) 
VALUES (6,'Topu','4.10 MB','4:30','mp4'); - -MERGE INTO song(imp_date,song_name,artist_name,country,f_id,g_name,rating,languages,releasedate,resolution) VALUES (DATEADD('DAY', 0, CURRENT_DATE()),'Tumi 长袍 尼罗布','Shrikanta','印度',1,'tagore',8,'孟加拉语','28-AUG-2011',1080); -MERGE INTO song(imp_date,song_name,artist_name,country,f_id,g_name,rating,languages,releasedate,resolution) VALUES (DATEADD('DAY', 0, CURRENT_DATE()),'舒克诺 帕塔尔 努普尔 帕埃','Prity','孟加拉国',2,'nazrul',5,'孟加拉语','21-SEP-1997',512); -MERGE INTO song(imp_date,song_name,artist_name,country,f_id,g_name,rating,languages,releasedate,resolution) VALUES (DATEADD('DAY', 0, CURRENT_DATE()),'阿米·奥帕尔·霍伊','Farida','孟加拉国',3,'民间',7,'孟加拉语','7-APR-2001',320); -MERGE INTO song(imp_date,song_name,artist_name,country,f_id,g_name,rating,languages,releasedate,resolution) VALUES (DATEADD('DAY', 0, CURRENT_DATE()),'我的爱','Enrique','美国',4,'蓝调',6,'英文','24-JAN-2007',1080); -MERGE INTO song(imp_date,song_name,artist_name,country,f_id,g_name,rating,languages,releasedate,resolution) VALUES (DATEADD('DAY', 0, CURRENT_DATE()),'打败它','Michel','英国',5,'流行',8,'英文','17-MAR-2002',720); -MERGE INTO song(imp_date,song_name,artist_name,country,f_id,g_name,rating,languages,releasedate,resolution) VALUES (DATEADD('DAY', 0, CURRENT_DATE()),'阿杰伊阿卡什','Topu','印度',6,'现代',10,'孟加拉语','27-MAR-2004',320); - - -MERGE into company(imp_date,company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_131','百度集团','北京','2000','李彦宏','李彦宏',102300000000,40000); -MERGE into company(imp_date,company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_132','阿里巴巴集团','杭州','1999年','马云','张勇',376800000000,103699); -MERGE into company(imp_date,company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_133','深圳市腾讯计算机系统有限公司','深圳','1998','马化腾','刘炽平',321600000000,56310); -MERGE into company(imp_date,company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_134','北京京东世纪贸易有限公司','北京','1998','刘强东','刘强东',28800000000,179000); -MERGE into company(imp_date,company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_135','网易公司','杭州','1997','丁磊','丁磊',67500000000,20000); - -MERGE into brand(imp_date,brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_136','阿里云','2009年9月10日','item_enterprise_13_132','张勇',50000000); -MERGE into brand(imp_date,brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_137','天猫','2012年1月11日','item_enterprise_13_132','张勇',100000000); -MERGE into brand(imp_date,brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_138','腾讯游戏','2003','item_enterprise_13_133','马化腾',50000000); -MERGE into brand(imp_date,brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES (DATEADD('DAY', -1, 
CURRENT_DATE()),'item_enterprise_13_139','度小满','2018','item_enterprise_13_131','朱光',100000000); -MERGE into brand(imp_date,brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES (DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_140','京东金融','2017','item_enterprise_13_134','刘强东',100000000); - -insert into company_revenue(imp_date,company_id,brand_id,revenue_proportion,profit_proportion,expenditure_proportion) VALUES ( DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_131','item_enterprise_13_139',10,10,30); -insert into company_revenue(imp_date,company_id,brand_id,revenue_proportion,profit_proportion,expenditure_proportion) VALUES ( DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_133','item_enterprise_13_138',80,80,60); -insert into company_revenue(imp_date,company_id,brand_id,revenue_proportion,profit_proportion,expenditure_proportion) VALUES ( DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_134','item_enterprise_13_140',80,80,60); -insert into company_revenue(imp_date,company_id,brand_id,revenue_proportion,profit_proportion,expenditure_proportion) VALUES ( DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_132','item_enterprise_13_137',80,80,60); -insert into company_revenue(imp_date,company_id,brand_id,revenue_proportion,profit_proportion,expenditure_proportion) VALUES ( DATEADD('DAY', -1, CURRENT_DATE()),'item_enterprise_13_132','item_enterprise_13_136',10,10,30); - -insert into company_brand_revenue(imp_date,year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '2018','item_enterprise_13_138',500000000,-300000000,10,-10); -insert into company_brand_revenue(imp_date,year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '2019','item_enterprise_13_136',100000000000,50000000000,100,50); -insert into company_brand_revenue(imp_date,year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ( DATEADD('DAY', -1, CURRENT_DATE()),'2018','item_enterprise_13_137',100000000000,50000000000,100,-10); -insert into company_brand_revenue(imp_date,year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '2018','item_enterprise_13_139',500000000,50000000000,10,50); -insert into company_brand_revenue(imp_date,year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ( DATEADD('DAY', -1, CURRENT_DATE()),'2018','item_enterprise_13_140',100000000000,-300000000,10,50); - --- benchmark +values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 
245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin'); \ No newline at end of file diff --git a/launchers/standalone/src/test/resources/db/schema-h2-demo.sql b/launchers/standalone/src/test/resources/db/schema-h2-demo.sql new file mode 100644 index 000000000..20f361cb7 --- /dev/null +++ b/launchers/standalone/src/test/resources/db/schema-h2-demo.sql @@ -0,0 +1,85 @@ +-------S2VisitsDemo +CREATE TABLE IF NOT EXISTS `s2_user_department` ( + `user_name` varchar(200) NOT NULL, + `department` varchar(200) NOT NULL, -- department of user + PRIMARY KEY (`user_name`,`department`) + ); +COMMENT ON TABLE s2_user_department IS 'user_department_info'; + +CREATE TABLE IF NOT EXISTS `s2_pv_uv_statis` ( + `imp_date` varchar(200) NOT NULL, + `user_name` varchar(200) NOT NULL, + `page` varchar(200) NOT NULL + ); +COMMENT ON TABLE s2_pv_uv_statis IS 's2_pv_uv_statis'; + +CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` ( + `imp_date` varchar(200) NOT NULL, + `user_name` varchar(200) NOT NULL, + `stay_hours` DOUBLE NOT NULL, + `page` varchar(200) NOT NULL + ); +COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info'; + +-------S2ArtistDemo +CREATE TABLE IF NOT EXISTS `singer` ( + `singer_name` varchar(200) NOT NULL, + `act_area` varchar(200) NOT NULL, + `song_name` varchar(200) NOT NULL, + `genre` varchar(200) NOT NULL, + `js_play_cnt` bigINT DEFAULT NULL, + `down_cnt` bigINT DEFAULT NULL, + `favor_cnt` bigINT DEFAULT NULL, + PRIMARY KEY (`singer_name`) + ); +COMMENT ON TABLE singer IS 'singer_info'; + +CREATE TABLE IF NOT EXISTS `genre` ( + `g_name` varchar(20) NOT NULL , -- genre name + `rating` INT , + `most_popular_in` varchar(50) , + PRIMARY KEY (`g_name`) + ); +COMMENT ON TABLE genre IS 'genre'; + +CREATE TABLE IF NOT EXISTS `artist` ( + `artist_name` varchar(50) NOT NULL , -- genre name + `citizenship` varchar(20) , + `gender` varchar(20) , + `g_name` varchar(50), + PRIMARY KEY (`artist_name`,`citizenship`) + ); +COMMENT ON TABLE artist IS 'artist'; + +-------S2CompanyDemo +CREATE TABLE IF NOT EXISTS `company` ( + `company_id` varchar(50) NOT NULL , + `company_name` varchar(50) NOT NULL , + `headquarter_address` varchar(50) NOT NULL , + `company_established_time` varchar(20) NOT NULL , + `founder` varchar(20) NOT NULL , + `ceo` varchar(20) NOT NULL , + `annual_turnover` bigint(15) , + `employee_count` int(7) , + PRIMARY KEY (`company_id`) + ); + +CREATE TABLE IF NOT EXISTS `brand` ( + `brand_id` varchar(50) NOT NULL , + `brand_name` varchar(50) NOT NULL , + `brand_established_time` varchar(20) NOT NULL , + `company_id` varchar(50) NOT 
NULL , + `legal_representative` varchar(20) NOT NULL , + `registered_capital` bigint(15) , + PRIMARY KEY (`brand_id`) + ); + +CREATE TABLE IF NOT EXISTS `brand_revenue` ( + `year_time` varchar(10) NOT NULL , + `brand_id` varchar(50) NOT NULL , + `revenue` bigint(15) NOT NULL, + `profit` bigint(15) NOT NULL , + `revenue_growth_year_on_year` double NOT NULL , + `profit_growth_year_on_year` double NOT NULL + ); + From b84dde3799da721e8acf9e208e59909356da71a7 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Tue, 26 Nov 2024 10:40:27 +0800 Subject: [PATCH 16/88] [improvement][headless]Clean code logic of headless translator. --- .../headless/core/pojo/Database.java | 3 +- .../translator/DefaultSemanticTranslator.java | 26 ++++--- .../calcite/CalciteQueryParser.java | 3 +- .../translator/calcite/sql/SchemaBuilder.java | 2 +- .../translator/calcite/sql/SqlBuilder.java | 10 ++- .../calcite/sql/node/DataModelNode.java | 5 +- .../calcite/sql/render/FilterRender.java | 2 +- .../calcite/sql/render/JoinRender.java | 11 ++- .../calcite/sql/render/OutputRender.java | 2 +- .../calcite/sql/render/SourceRender.java | 6 +- .../converter/MetricRatioConverter.java | 3 +- .../converter/SqlQueryConverter.java | 68 +++++++++---------- .../converter/SqlVariableConverter.java | 6 +- .../converter/StructQueryConverter.java | 14 ++-- .../headless/core/utils/SqlUtils.java | 6 +- .../server/utils/DatabaseConverter.java | 2 + .../calcite/HeadlessParserServiceTest.java | 6 +- 17 files changed, 84 insertions(+), 91 deletions(-) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/Database.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/Database.java index 51a908024..5227166cc 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/Database.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/Database.java @@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.pojo; import com.google.common.collect.Lists; import com.tencent.supersonic.common.pojo.RecordInfo; +import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.util.AESEncryptionUtil; import lombok.AllArgsConstructor; import lombok.Builder; @@ -36,7 +37,7 @@ public class Database extends RecordInfo { private String schema; /** mysql,clickhouse */ - private String type; + private EngineType type; private List admins = Lists.newArrayList(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java index 8474ce2fc..662819598 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java @@ -50,42 +50,40 @@ public class DefaultSemanticTranslator implements SemanticTranslator { } } catch (Exception e) { queryStatement.setErrMsg(e.getMessage()); - log.error("Failed to translate semantic query [{}]", e); + log.error("Failed to translate semantic query [{}]", e.getMessage(), e); } } private void doOntologyParse(QueryStatement queryStatement) throws Exception { OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam(); - SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); log.info("parse with ontology: [{}]", ontologyQueryParam); 
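
In the doOntologyParse hunk that continues below, the Calcite parser yields an inner "ontology" SQL plus an outer SQL that references it by table name; the translator then stitches the two together, prepending a WITH clause when the engine supports it and delegating to SqlMergeWithUtils.mergeWith when the outer SQL already carries one. A minimal sketch of just the plain stitching step, assuming no pre-existing WITH clause (the class and names below are illustrative, not the project's API):

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    // Wrap each inner query as a CTE and prepend the WITH clause to the outer SQL.
    public final class WithClauseStitchSketch {

        static String stitch(String outerSql, Map<String, String> innerTables) {
            String ctes = innerTables.entrySet().stream()
                    .map(e -> String.format("%s as (%s)", e.getKey(), e.getValue()))
                    .collect(Collectors.joining(", "));
            return "with " + ctes + "\n" + outerSql;
        }

        public static void main(String[] args) {
            Map<String, String> inner = new LinkedHashMap<>();
            inner.put("t_ontology",
                    "select imp_date, user_name, stay_hours from s2_stay_time_statis");
            System.out.println(stitch(
                    "select user_name, sum(stay_hours) from t_ontology group by user_name",
                    inner));
        }
    }
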
ComponentFactory.getQueryParser().parse(queryStatement); - String ontologyQueryTable = sqlQueryParam.getTable(); - String ontologyQuerySql = sqlQueryParam.getSql(); - String ontologySql = queryStatement.getSql(); - if (!queryStatement.isOk()) { throw new Exception(String.format("parse ontology table [%s] error [%s]", - ontologyQueryTable, queryStatement.getErrMsg())); + queryStatement.getSqlQueryParam().getTable(), queryStatement.getErrMsg())); } - List> tables = new ArrayList<>(); + SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); + String ontologyQuerySql = sqlQueryParam.getSql(); + String ontologyInnerTable = sqlQueryParam.getTable(); + String ontologyInnerSql = queryStatement.getSql(); - tables.add(Pair.of(ontologyQueryTable, ontologySql)); + List> tables = new ArrayList<>(); + tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql)); if (sqlQueryParam.isSupportWith()) { - EngineType engineType = - EngineType.fromString(queryStatement.getOntology().getDatabase().getType()); + EngineType engineType = queryStatement.getOntology().getDatabase().getType(); if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) { String withSql = "with " + tables.stream() .map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight())) .collect(Collectors.joining(",")) + "\n" + ontologyQuerySql; queryStatement.setSql(withSql); } else { - List parentTableList = + List withTableList = tables.stream().map(Pair::getLeft).collect(Collectors.toList()); - List parentSqlList = + List withSqlList = tables.stream().map(Pair::getRight).collect(Collectors.toList()); String mergeSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql, - parentSqlList, parentTableList); + withSqlList, withTableList); queryStatement.setSql(mergeSql); } } else { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java index 1deb240ee..8eed9bc2c 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/CalciteQueryParser.java @@ -28,7 +28,8 @@ public class CalciteQueryParser implements QueryParser { .enableOptimize(queryStatement.getEnableOptimize()).build()) .build(); SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema); - sqlBuilder.buildOntologySql(queryStatement); + String sql = sqlBuilder.buildOntologySql(queryStatement); + queryStatement.setSql(sql); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SchemaBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SchemaBuilder.java index 43949e000..c3ec52b9e 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SchemaBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SchemaBuilder.java @@ -33,7 +33,7 @@ public class SchemaBuilder { Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema, Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory, Configuration.config); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); S2SQLSqlValidatorImpl s2SQLSqlValidator = new 
S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader, Configuration.typeFactory, Configuration.getValidatorConfig(engineType)); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java index e6db7eabe..a3b18076f 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java @@ -35,7 +35,7 @@ public class SqlBuilder { this.schema = schema; } - public void buildOntologySql(QueryStatement queryStatement) throws Exception { + public String buildOntologySql(QueryStatement queryStatement) throws Exception { this.ontologyQueryParam = queryStatement.getOntologyQueryParam(); if (ontologyQueryParam.getMetrics() == null) { ontologyQueryParam.setMetrics(new ArrayList<>()); @@ -50,14 +50,12 @@ public class SqlBuilder { buildParseNode(); Database database = queryStatement.getOntology().getDatabase(); - EngineType engineType = EngineType.fromString(database.getType()); - optimizeParseNode(engineType); - String sql = getSql(engineType); - queryStatement.setSql(sql); + optimizeParseNode(database.getType()); + return getSql(database.getType()); } private void buildParseNode() throws Exception { - // find the match Datasource + // find relevant data models scope = SchemaBuilder.getScope(schema); List dataModels = DataModelNode.getRelatedDataModels(scope, schema, ontologyQueryParam); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java index a648b4122..593b73cf5 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java @@ -168,7 +168,7 @@ public class DataModelNode extends SemanticNode { public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema, OntologyQueryParam metricCommand, Set queryDimension, List measures, SqlValidatorScope scope) throws Exception { - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { Set filterConditions = new HashSet<>(); FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType), @@ -229,8 +229,7 @@ public class DataModelNode extends SemanticNode { } filterMeasure.addAll(sourceMeasure); filterMeasure.addAll(dimension); - EngineType engineType = - EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, scope); boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java index 6ad65aa1b..90504aff1 100644 --- 
a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/FilterRender.java @@ -32,7 +32,7 @@ public class FilterRender extends Renderer { SqlNode filterNode = null; List queryMetrics = new ArrayList<>(metricCommand.getMetrics()); List queryDimensions = new ArrayList<>(metricCommand.getDimensions()); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) { filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java index 5d04eef8f..c34c6c18b 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java @@ -50,7 +50,7 @@ public class JoinRender extends Renderer { public void render(OntologyQueryParam metricCommand, List dataModels, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { String queryWhere = metricCommand.getWhere(); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); Set whereFields = new HashSet<>(); List fieldWhere = new ArrayList<>(); if (queryWhere != null && !queryWhere.isEmpty()) { @@ -146,7 +146,7 @@ public class JoinRender extends Renderer { Set sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); for (String m : reqMetrics) { if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) { MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias); @@ -181,7 +181,7 @@ public class JoinRender extends Renderer { Set dimension, SqlValidatorScope scope, S2CalciteSchema schema) throws Exception { String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); for (String d : reqDimensions) { if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) { @@ -261,7 +261,7 @@ public class JoinRender extends Renderer { private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, Map before, DataModel dataModel, S2CalciteSchema schema, SqlValidatorScope scope) throws Exception { - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); SqlNode condition = getCondition(leftTable, tableView, dataModel, schema, scope, engineType); SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral(""); @@ -454,8 +454,7 @@ public class 
JoinRender extends Renderer { endTime = zipper.getAlias() + "." + endTimeOp.get().getName(); dateTime = partMetric.getAlias() + "." + partTime.get().getName(); } - EngineType engineType = - EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); ArrayList operandList = new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType), SemanticNode.parse(dateTime, scope, engineType))); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java index 9a516cefc..4ab0daa35 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/OutputRender.java @@ -25,7 +25,7 @@ public class OutputRender extends Renderer { public void render(OntologyQueryParam metricCommand, List dataModels, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { TableView selectDataSet = super.tableView; - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); for (String dimension : metricCommand.getDimensions()) { selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType)); } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java index 19fea0587..3a777a676 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java @@ -109,7 +109,7 @@ public class SourceRender extends Renderer { S2CalciteSchema schema, boolean nonAgg, Map extendFields, TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception { List dimensionList = schema.getDimensions().get(datasource.getName()); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); boolean isAdd = false; if (!CollectionUtils.isEmpty(dimensionList)) { for (Dimension dim : dimensionList) { @@ -187,7 +187,7 @@ public class SourceRender extends Renderer { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { Iterator iterator = fields.iterator(); List whereNode = new ArrayList<>(); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); while (iterator.hasNext()) { String cur = iterator.next(); if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) { @@ -341,7 +341,7 @@ public class SourceRender extends Renderer { String queryWhere = ontologyQueryParam.getWhere(); Set whereFields = new HashSet<>(); List fieldWhere = new ArrayList<>(); - EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); + EngineType engineType = schema.getOntology().getDatabase().getType(); if (queryWhere != null && !queryWhere.isEmpty()) { 
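
A recurring change across the renderer hunks in this patch is that the database type is now carried as an EngineType enum rather than a raw string, so call sites such as FilterRender, JoinRender, OutputRender and SourceRender drop the EngineType.fromString(...) conversion and read schema.getOntology().getDatabase().getType() directly. The sketch below shows the general shape of such a typed enum with a lenient factory kept for boundaries that still hold strings (for example a converter that builds Database from persisted config); the member list is assumed for illustration and may not match the project's actual EngineType:

    // Illustrative only: the member names here are assumptions, not the project's enum.
    public enum EngineTypeSketch {
        MYSQL, CLICKHOUSE, POSTGRESQL, H2;

        // Lenient, case-insensitive factory for code paths that still receive strings.
        public static EngineTypeSketch fromString(String name) {
            for (EngineTypeSketch type : values()) {
                if (type.name().equalsIgnoreCase(name)) {
                    return type;
                }
            }
            throw new IllegalArgumentException("Unknown engine type: " + name);
        }
    }

Typing the field once on Database keeps the string-to-enum conversion at a single boundary instead of repeating it in every renderer.
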
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType); FilterNode.getFilterField(sqlNode, whereFields); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/MetricRatioConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/MetricRatioConverter.java index 285ea655b..05dd0fcfb 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/MetricRatioConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/MetricRatioConverter.java @@ -62,8 +62,7 @@ public class MetricRatioConverter implements QueryConverter { @Override public void convert(QueryStatement queryStatement) throws Exception { Database database = queryStatement.getOntology().getDatabase(); - generateRatioSql(queryStatement, EngineType.fromString(database.getType().toUpperCase()), - database.getVersion()); + generateRatioSql(queryStatement, database.getType(), database.getVersion()); } /** Ratio */ diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java index 1b2e55362..9b46c8388 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java @@ -34,31 +34,24 @@ public class SqlQueryConverter implements QueryConverter { @Override public boolean accept(QueryStatement queryStatement) { - if (Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL()) { - return true; - } - return false; + return Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL(); } @Override public void convert(QueryStatement queryStatement) throws Exception { - SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); convertNameToBizName(queryStatement); rewriteFunction(queryStatement); - String reqSql = queryStatement.getSqlQueryParam().getSql(); - String tableName = SqlSelectHelper.getTableName(reqSql); + rewriteOrderBy(queryStatement); + + // fill sqlQuery + SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); + SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); + String tableName = SqlSelectHelper.getTableName(sqlQueryParam.getSql()); if (StringUtils.isEmpty(tableName)) { return; } - - // replace order by field with the select sequence number - queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderbyField(reqSql)); - log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql()); - - SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); - // fill dataSetQuery - SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); sqlQueryParam.setTable(tableName.toLowerCase()); + SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); if (!sqlGenerateUtils.isSupportWith( EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()), semanticSchemaResp.getDatabaseResp().getVersion())) { @@ -67,27 +60,26 @@ public class SqlQueryConverter implements QueryConverter { } // build ontologyQuery - List allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql()); + List allFields = 
SqlSelectHelper.getAllSelectFields(sqlQueryParam.getSql()); List metricSchemas = getMetrics(semanticSchemaResp, allFields); List metrics = metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList()); - AggOption aggOption = getAggOption(queryStatement, metricSchemas); + AggOption aggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas); Set dimensions = getDimensions(semanticSchemaResp, allFields); OntologyQueryParam ontologyQueryParam = new OntologyQueryParam(); ontologyQueryParam.getMetrics().addAll(metrics); ontologyQueryParam.getDimensions().addAll(dimensions); ontologyQueryParam.setAggOption(aggOption); ontologyQueryParam.setNativeQuery(!AggOption.isAgg(aggOption)); - - log.info("parse sqlQuery [{}] ", sqlQueryParam); queryStatement.setOntologyQueryParam(ontologyQueryParam); - queryStatement.setSql(sqlQueryParam.getSql()); + generateDerivedMetric(sqlGenerateUtils, queryStatement); + + queryStatement.setSql(sqlQueryParam.getSql()); + log.info("parse sqlQuery [{}] ", sqlQueryParam); } - private AggOption getAggOption(QueryStatement queryStatement, - List metricSchemas) { - String sql = queryStatement.getSql(); + private AggOption getAggOption(String sql, List metricSchemas) { if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) { return AggOption.AGGREGATION; } @@ -148,30 +140,31 @@ public class SqlQueryConverter implements QueryConverter { private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils, QueryStatement queryStatement) { SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); - SqlQueryParam dsParam = queryStatement.getSqlQueryParam(); - OntologyQueryParam ontology = queryStatement.getOntologyQueryParam(); - String sql = dsParam.getSql(); + SqlQueryParam sqlParam = queryStatement.getSqlQueryParam(); + OntologyQueryParam ontologyParam = queryStatement.getOntologyQueryParam(); + String sql = sqlParam.getSql(); Set measures = new HashSet<>(); Map replaces = generateDerivedMetric(sqlGenerateUtils, semanticSchemaResp, - ontology.getAggOption(), ontology.getMetrics(), ontology.getDimensions(), measures); + ontologyParam.getAggOption(), ontologyParam.getMetrics(), + ontologyParam.getDimensions(), measures); if (!CollectionUtils.isEmpty(replaces)) { // metricTable sql use measures replace metric sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces); - ontology.setAggOption(AggOption.NATIVE); + ontologyParam.setAggOption(AggOption.NATIVE); // metricTable use measures replace metric if (!CollectionUtils.isEmpty(measures)) { - ontology.getMetrics().addAll(measures); + ontologyParam.getMetrics().addAll(measures); } else { // empty measure , fill default - ontology.setMetrics(new ArrayList<>()); - ontology.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( - getDefaultModel(semanticSchemaResp, ontology.getDimensions()))); + ontologyParam.setMetrics(new ArrayList<>()); + ontologyParam.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( + getDefaultModel(semanticSchemaResp, ontologyParam.getDimensions()))); } } - dsParam.setSql(sql); + sqlParam.setSql(sql); } private Map generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils, @@ -245,6 +238,13 @@ public class SqlQueryConverter implements QueryConverter { queryStatement.getSqlQueryParam().setSql(sql); } + private void rewriteOrderBy(QueryStatement queryStatement) { + // replace order by field with the select sequence number + String sql = queryStatement.getSqlQueryParam().getSql(); + String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql); + 
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql); + queryStatement.getSqlQueryParam().setSql(newSql); + } private void rewriteFunction(QueryStatement queryStatement) { SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); @@ -300,7 +300,7 @@ public class SqlQueryConverter implements QueryConverter { } return modelMatchCnt.entrySet().stream() .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) - .map(m -> m.getKey()).findFirst().orElse(""); + .map(Map.Entry::getKey).findFirst().orElse(""); } return semanticSchemaResp.getModelResps().get(0).getBizName(); } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableConverter.java index 299ea4609..d0db39fb1 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlVariableConverter.java @@ -19,10 +19,8 @@ public class SqlVariableConverter implements QueryConverter { @Override public boolean accept(QueryStatement queryStatement) { - if (Objects.isNull(queryStatement.getStructQueryParam()) && queryStatement.getIsS2SQL()) { - return false; - } - return true; + return Objects.nonNull(queryStatement.getStructQueryParam()) + && !queryStatement.getIsS2SQL(); } @Override diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java index 38aa00794..0366cc037 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/StructQueryConverter.java @@ -1,7 +1,7 @@ package com.tencent.supersonic.headless.core.translator.converter; +import com.tencent.supersonic.common.pojo.Aggregator; import com.tencent.supersonic.common.pojo.ColumnOrder; -import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.core.pojo.Database; @@ -22,11 +22,8 @@ public class StructQueryConverter implements QueryConverter { @Override public boolean accept(QueryStatement queryStatement) { - if (Objects.nonNull(queryStatement.getStructQueryParam()) && !queryStatement.getIsS2SQL()) { - return true; - } - - return false; + return Objects.nonNull(queryStatement.getStructQueryParam()) + && !queryStatement.getIsS2SQL(); } @Override @@ -43,8 +40,7 @@ public class StructQueryConverter implements QueryConverter { sqlGenerateUtils.getOrderBy(structQueryParam), sqlGenerateUtils.getLimit(structQueryParam)); Database database = queryStatement.getOntology().getDatabase(); - EngineType engineType = EngineType.fromString(database.getType().toUpperCase()); - if (!sqlGenerateUtils.isSupportWith(engineType, database.getVersion())) { + if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) { sqlParam.setSupportWith(false); sql = String.format("select %s from %s t0 %s %s %s", sqlGenerateUtils.getSelect(structQueryParam), dsTable, @@ -58,7 +54,7 @@ public class StructQueryConverter implements QueryConverter { OntologyQueryParam 
ontologyQueryParam = new OntologyQueryParam(); ontologyQueryParam.getDimensions().addAll(structQueryParam.getGroups()); ontologyQueryParam.getMetrics().addAll(structQueryParam.getAggregators().stream() - .map(a -> a.getColumn()).collect(Collectors.toList())); + .map(Aggregator::getColumn).collect(Collectors.toList())); String where = sqlGenerateUtils.generateWhere(structQueryParam, null); ontologyQueryParam.setWhere(where); ontologyQueryParam.setAggOption(AggOption.AGGREGATION); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlUtils.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlUtils.java index 04cf92616..339586bf0 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlUtils.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/utils/SqlUtils.java @@ -3,6 +3,7 @@ package com.tencent.supersonic.headless.core.utils; import javax.sql.DataSource; import com.tencent.supersonic.common.pojo.QueryColumn; +import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.util.DateUtils; import com.tencent.supersonic.headless.api.pojo.enums.DataType; import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp; @@ -64,7 +65,7 @@ public class SqlUtils { public SqlUtils init(Database database) { return SqlUtilsBuilder.getBuilder() .withName(database.getId() + AT_SYMBOL + database.getName()) - .withType(database.getType()).withJdbcUrl(database.getUrl()) + .withType(database.getType().getName()).withJdbcUrl(database.getUrl()) .withUsername(database.getUsername()).withPassword(database.getPassword()) .withJdbcDataSource(this.jdbcDataSource).withResultLimit(this.resultLimit) .withIsQueryLogEnable(this.isQueryLogEnable).build(); @@ -224,7 +225,8 @@ public class SqlUtils { } public SqlUtils build() { - Database database = Database.builder().name(this.name).type(this.type).url(this.jdbcUrl) + Database database = Database.builder().name(this.name) + .type(EngineType.fromString(this.type.toUpperCase())).url(this.jdbcUrl) .username(this.username).password(this.password).build(); SqlUtils sqlUtils = new SqlUtils(database); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DatabaseConverter.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DatabaseConverter.java index 95acf699f..e7e9551f1 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DatabaseConverter.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/DatabaseConverter.java @@ -1,6 +1,7 @@ package com.tencent.supersonic.headless.server.utils; import com.alibaba.fastjson.JSONObject; +import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.api.pojo.request.DatabaseReq; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.core.pojo.ConnectInfo; @@ -16,6 +17,7 @@ public class DatabaseConverter { public static Database convert(DatabaseResp databaseResp) { Database database = new Database(); BeanUtils.copyProperties(databaseResp, database); + database.setType(EngineType.fromString(databaseResp.getType().toUpperCase())); return database; } diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java 
b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java index 38138d376..a0a8080da 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java @@ -29,9 +29,9 @@ class HeadlessParserServiceTest { SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); QueryStatement queryStatement = new QueryStatement(); queryStatement.setOntologyQueryParam(ontologyQueryParam); - aggBuilder.buildOntologySql(queryStatement); - EngineType engineType = - EngineType.fromString(semanticSchema.getOntology().getDatabase().getType()); + String sql = aggBuilder.buildOntologySql(queryStatement); + queryStatement.setSql(sql); + EngineType engineType = semanticSchema.getOntology().getDatabase().getType(); sqlParser.setSql(aggBuilder.getSql(engineType)); } catch (Exception e) { sqlParser.setErrMsg(e.getMessage()); From 40bfcdce2c7444ab38021577191a318a5f0312ab Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Tue, 26 Nov 2024 15:21:57 +0800 Subject: [PATCH 17/88] [improvement][headless]Clean code logic of headless translator. --- .../calcite/s2sql/OntologyQueryParam.java | 7 +-- .../translator/calcite/sql/SqlBuilder.java | 6 --- .../calcite/sql/node/DataModelNode.java | 45 +++++++++---------- .../calcite/sql/render/JoinRender.java | 14 +++--- .../calcite/sql/render/SourceRender.java | 24 +++++----- .../converter/SqlQueryConverter.java | 7 ++- .../server/manager/SemanticSchemaManager.java | 42 ++--------------- .../server/service/SchemaService.java | 11 +---- .../service/impl/SchemaServiceImpl.java | 4 -- .../calcite/HeadlessParserServiceTest.java | 9 ++-- .../tencent/supersonic/demo/S2VisitsDemo.java | 4 +- .../headless/QueryByMetricTest.java | 1 + .../supersonic/headless/TranslateTest.java | 2 +- .../tencent/supersonic/util/DataUtils.java | 8 ++-- 14 files changed, 65 insertions(+), 119 deletions(-) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java index b399eb486..90711c5da 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/OntologyQueryParam.java @@ -1,16 +1,17 @@ package com.tencent.supersonic.headless.core.translator.calcite.s2sql; -import com.google.common.collect.Lists; +import com.google.common.collect.Sets; import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import lombok.Data; import java.util.List; +import java.util.Set; @Data public class OntologyQueryParam { - private List metrics = Lists.newArrayList(); - private List dimensions = Lists.newArrayList(); + private Set metrics = Sets.newHashSet(); + private Set dimensions = Sets.newHashSet(); private String where; private Long limit; private List order; diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java index a3b18076f..570854a07 100644 --- 
a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java @@ -37,12 +37,6 @@ public class SqlBuilder { public String buildOntologySql(QueryStatement queryStatement) throws Exception { this.ontologyQueryParam = queryStatement.getOntologyQueryParam(); - if (ontologyQueryParam.getMetrics() == null) { - ontologyQueryParam.setMetrics(new ArrayList<>()); - } - if (ontologyQueryParam.getDimensions() == null) { - ontologyQueryParam.setDimensions(new ArrayList<>()); - } if (ontologyQueryParam.getLimit() == null) { ontologyQueryParam.setLimit(0L); } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java index 593b73cf5..5d4b38b49 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java @@ -150,23 +150,23 @@ public class DataModelNode extends SemanticNode { } public static void getQueryDimensionMeasure(S2CalciteSchema schema, - OntologyQueryParam metricCommand, Set queryDimension, List measures) { - queryDimension.addAll(metricCommand.getDimensions().stream() + OntologyQueryParam queryParam, Set queryDimensions, Set queryMeasures) { + queryDimensions.addAll(queryParam.getDimensions().stream() .map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] : d) .collect(Collectors.toSet())); Set schemaMetricName = schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet()); - schema.getMetrics().stream().filter(m -> metricCommand.getMetrics().contains(m.getName())) + schema.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName())) .forEach(m -> m.getMetricTypeParams().getMeasures().stream() - .forEach(mm -> measures.add(mm.getName()))); - metricCommand.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)) - .forEach(m -> measures.add(m)); + .forEach(mm -> queryMeasures.add(mm.getName()))); + queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)) + .forEach(m -> queryMeasures.add(m)); } public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema, - OntologyQueryParam metricCommand, Set queryDimension, List measures, + OntologyQueryParam metricCommand, Set queryDimension, Set measures, SqlValidatorScope scope) throws Exception { EngineType engineType = schema.getOntology().getDatabase().getType(); if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { @@ -192,20 +192,20 @@ public class DataModelNode extends SemanticNode { } public static List getRelatedDataModels(SqlValidatorScope scope, - S2CalciteSchema schema, OntologyQueryParam metricCommand) throws Exception { + S2CalciteSchema schema, OntologyQueryParam queryParam) throws Exception { List dataModels = new ArrayList<>(); // check by metric - List measures = new ArrayList<>(); - Set queryDimension = new HashSet<>(); - getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures); + Set queryMeasures = new HashSet<>(); + Set queryDimensions = new HashSet<>(); + getQueryDimensionMeasure(schema, queryParam, queryDimensions, queryMeasures); DataModel baseDataModel = null; // 
one , match measure count Map dataSourceMeasures = new HashMap<>(); for (Map.Entry entry : schema.getDataModels().entrySet()) { Set sourceMeasure = entry.getValue().getMeasures().stream() .map(mm -> mm.getName()).collect(Collectors.toSet()); - sourceMeasure.retainAll(measures); + sourceMeasure.retainAll(queryMeasures); dataSourceMeasures.put(entry.getKey(), sourceMeasure.size()); } log.info("metrics: [{}]", dataSourceMeasures); @@ -230,17 +230,17 @@ public class DataModelNode extends SemanticNode { filterMeasure.addAll(sourceMeasure); filterMeasure.addAll(dimension); EngineType engineType = schema.getOntology().getDatabase().getType(); - mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, + mergeQueryFilterDimensionMeasure(schema, queryParam, queryDimensions, queryMeasures, scope); - boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, - metricCommand, scope, engineType); + boolean isAllMatch = checkMatch(sourceMeasure, queryDimensions, queryMeasures, + dimension, queryParam, scope, engineType); if (isAllMatch) { log.debug("baseDataModel match all "); return dataModels; } // find all dataSource has the same identifiers - List linkDataModels = getLinkDataSourcesByJoinRelation(queryDimension, - measures, baseDataModel, schema); + List linkDataModels = getLinkDataSourcesByJoinRelation(queryDimensions, + queryMeasures, baseDataModel, schema); if (CollectionUtils.isEmpty(linkDataModels)) { log.debug("baseDataModel get by identifiers "); Set baseIdentifiers = baseDataModel.getIdentifiers().stream() @@ -249,24 +249,23 @@ public class DataModelNode extends SemanticNode { throw new Exception( "datasource error : " + baseDataModel.getName() + " miss identifier"); } - linkDataModels = getLinkDataSources(baseIdentifiers, queryDimension, measures, + linkDataModels = getLinkDataSources(baseIdentifiers, queryDimensions, queryMeasures, baseDataModel, schema); if (linkDataModels.isEmpty()) { throw new Exception(String.format( "not find the match datasource : dimension[%s],measure[%s]", - queryDimension, measures)); + queryDimensions, queryMeasures)); } } log.debug("linkDataModels {}", linkDataModels); return linkDataModels; - // dataModels.addAll(linkDataModels); } return dataModels; } private static boolean checkMatch(Set sourceMeasure, Set queryDimension, - List measures, Set dimension, OntologyQueryParam metricCommand, + Set measures, Set dimension, OntologyQueryParam metricCommand, SqlValidatorScope scope, EngineType engineType) throws Exception { boolean isAllMatch = true; sourceMeasure.retainAll(measures); @@ -299,7 +298,7 @@ public class DataModelNode extends SemanticNode { } private static List getLinkDataSourcesByJoinRelation(Set queryDimension, - List measures, DataModel baseDataModel, S2CalciteSchema schema) { + Set measures, DataModel baseDataModel, S2CalciteSchema schema) { Set linkDataSourceName = new HashSet<>(); List linkDataModels = new ArrayList<>(); Set before = new HashSet<>(); @@ -383,7 +382,7 @@ public class DataModelNode extends SemanticNode { } private static List getLinkDataSources(Set baseIdentifiers, - Set queryDimension, List measures, DataModel baseDataModel, + Set queryDimension, Set measures, DataModel baseDataModel, S2CalciteSchema schema) { Set linkDataSourceName = new HashSet<>(); List linkDataModels = new ArrayList<>(); diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java index c34c6c18b..531a773b4 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java @@ -59,7 +59,7 @@ public class JoinRender extends Renderer { fieldWhere = whereFields.stream().collect(Collectors.toList()); } Set queryAllDimension = new HashSet<>(); - List measures = new ArrayList<>(); + Set measures = new HashSet<>(); DataModelNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures); SqlNode left = null; TableView leftTable = null; @@ -73,8 +73,8 @@ public class JoinRender extends Renderer { final DataModel dataModel = dataModels.get(i); final Set filterDimensions = new HashSet<>(); final Set filterMetrics = new HashSet<>(); - final List queryDimension = new ArrayList<>(); - final List queryMetrics = new ArrayList<>(); + final Set queryDimension = new HashSet<>(); + final Set queryMetrics = new HashSet<>(); SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema, filterDimensions, filterMetrics); List reqMetric = new ArrayList<>(metricCommand.getMetrics()); @@ -142,7 +142,7 @@ public class JoinRender extends Renderer { } private void doMetric(Map innerSelect, TableView filterView, - List queryMetrics, List reqMetrics, DataModel dataModel, + Set queryMetrics, List reqMetrics, DataModel dataModel, Set sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); @@ -177,7 +177,7 @@ public class JoinRender extends Renderer { } private void doDimension(Map innerSelect, Set filterDimension, - List queryDimension, List reqDimensions, DataModel dataModel, + Set queryDimension, List reqDimensions, DataModel dataModel, Set dimension, SqlValidatorScope scope, S2CalciteSchema schema) throws Exception { String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); @@ -205,7 +205,7 @@ public class JoinRender extends Renderer { } private boolean getMatchMetric(S2CalciteSchema schema, Set sourceMeasure, String m, - List queryMetrics) { + Set queryMetrics) { Optional metric = schema.getMetrics().stream() .filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst(); boolean isAdd = false; @@ -226,7 +226,7 @@ public class JoinRender extends Renderer { } private boolean getMatchDimension(S2CalciteSchema schema, Set sourceDimension, - DataModel dataModel, String d, List queryDimension) { + DataModel dataModel, String d, Set queryDimension) { String oriDimension = d; boolean isAdd = false; if (d.contains(Constants.DIMENSION_IDENTIFY)) { diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java index 3a777a676..399835731 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/SourceRender.java @@ -41,14 +41,14 @@ import static com.tencent.supersonic.headless.core.translator.calcite.s2sql.Cons public class SourceRender extends Renderer { public static TableView renderOne(String alias, List fieldWheres, - List 
reqMetrics, List reqDimensions, String queryWhere, + Set reqMetrics, Set reqDimensions, String queryWhere, DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { TableView dataSet = new TableView(); TableView output = new TableView(); - List queryMetrics = new ArrayList<>(reqMetrics); - List queryDimensions = new ArrayList<>(reqDimensions); + Set queryMetrics = new HashSet<>(reqMetrics); + Set queryDimensions = new HashSet<>(reqDimensions); List fieldWhere = new ArrayList<>(fieldWheres); Map extendFields = new HashMap<>(); if (!fieldWhere.isEmpty()) { @@ -57,9 +57,7 @@ public class SourceRender extends Renderer { whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema, dimensions, metrics); queryMetrics.addAll(metrics); - queryMetrics = uniqList(queryMetrics); queryDimensions.addAll(dimensions); - queryDimensions = uniqList(queryDimensions); mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields, datasource, scope, schema, nonAgg); } @@ -182,8 +180,8 @@ public class SourceRender extends Renderer { } } - private static List getWhereMeasure(List fields, List queryMetrics, - List queryDimensions, Map extendFields, DataModel datasource, + private static List getWhereMeasure(List fields, Set queryMetrics, + Set queryDimensions, Map extendFields, DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { Iterator iterator = fields.iterator(); List whereNode = new ArrayList<>(); @@ -224,17 +222,17 @@ public class SourceRender extends Renderer { } private static void mergeWhere(List fields, TableView dataSet, TableView outputSet, - List queryMetrics, List queryDimensions, - Map extendFields, DataModel datasource, SqlValidatorScope scope, - S2CalciteSchema schema, boolean nonAgg) throws Exception { + Set queryMetrics, Set queryDimensions, Map extendFields, + DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) + throws Exception { List whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, extendFields, datasource, scope, schema, nonAgg); dataSet.getMeasure().addAll(whereNode); // getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema); } - public static void whereDimMetric(List fields, List queryMetrics, - List queryDimensions, DataModel datasource, S2CalciteSchema schema, + public static void whereDimMetric(List fields, Set queryMetrics, + Set queryDimensions, DataModel datasource, S2CalciteSchema schema, Set dimensions, Set metrics) { for (String field : fields) { if (queryDimensions.contains(field) || queryMetrics.contains(field)) { @@ -310,7 +308,7 @@ public class SourceRender extends Renderer { return false; } - private static void addTimeDimension(DataModel dataModel, List queryDimension) { + private static void addTimeDimension(DataModel dataModel, Set queryDimension) { if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) { Optional startTimeOp = dataModel.getDimensions().stream() .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java index 9b46c8388..f5d1a7696 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java +++ 
b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java @@ -158,7 +158,6 @@ public class SqlQueryConverter implements QueryConverter { ontologyParam.getMetrics().addAll(measures); } else { // empty measure , fill default - ontologyParam.setMetrics(new ArrayList<>()); ontologyParam.getMetrics().add(sqlGenerateUtils.generateInternalMetricName( getDefaultModel(semanticSchemaResp, ontologyParam.getDimensions()))); } @@ -168,8 +167,8 @@ public class SqlQueryConverter implements QueryConverter { } private Map generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils, - SemanticSchemaResp semanticSchemaResp, AggOption aggOption, List metrics, - List dimensions, Set measures) { + SemanticSchemaResp semanticSchemaResp, AggOption aggOption, Set metrics, + Set dimensions, Set measures) { Map result = new HashMap<>(); List metricResps = semanticSchemaResp.getMetrics(); List dimensionResps = semanticSchemaResp.getDimensions(); @@ -291,7 +290,7 @@ public class SqlQueryConverter implements QueryConverter { return elements.stream(); } - private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List dimensions) { + private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, Set dimensions) { if (!CollectionUtils.isEmpty(dimensions)) { Map modelMatchCnt = new HashMap<>(); for (ModelResp modelResp : semanticSchemaResp.getModelResps()) { diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java index 7e5eff936..5a6228b9e 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java @@ -2,33 +2,14 @@ package com.tencent.supersonic.headless.server.manager; import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; -import com.tencent.supersonic.headless.api.pojo.Field; import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.TagResp; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataType; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DimensionTimeTypeParams; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.*; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization.TimePartType; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import 
com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; -import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.FieldParamYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.MetricParamYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl; -import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl; +import com.tencent.supersonic.headless.server.pojo.yaml.*; import com.tencent.supersonic.headless.server.service.SchemaService; import com.tencent.supersonic.headless.server.utils.DatabaseConverter; import lombok.extern.slf4j.Slf4j; @@ -36,15 +17,8 @@ import org.apache.commons.lang3.tuple.Triple; import org.springframework.stereotype.Service; import org.springframework.util.CollectionUtils; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; import java.util.stream.Collectors; @Slf4j @@ -184,16 +158,6 @@ public class SemanticSchemaManager { if (Objects.nonNull(d.getModelSourceTypeEnum())) { dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name())); } - if (Objects.nonNull(d.getFields()) && !CollectionUtils.isEmpty(d.getFields())) { - Set measures = dataModel.getMeasures().stream().map(mm -> mm.getName()) - .collect(Collectors.toSet()); - for (Field f : d.getFields()) { - if (!measures.contains(f.getFieldName())) { - dataModel.getMeasures().add(Measure.builder().expr(f.getFieldName()) - .name(f.getFieldName()).agg("").build()); - } - } - } return dataModel; } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/SchemaService.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/SchemaService.java index f7d978bcc..f240bcf66 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/SchemaService.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/SchemaService.java @@ -9,15 +9,7 @@ import com.tencent.supersonic.headless.api.pojo.MetaFilter; import com.tencent.supersonic.headless.api.pojo.SemanticSchema; import com.tencent.supersonic.headless.api.pojo.request.ItemUseReq; import com.tencent.supersonic.headless.api.pojo.request.SchemaFilterReq; -import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; -import com.tencent.supersonic.headless.api.pojo.response.DimensionResp; -import com.tencent.supersonic.headless.api.pojo.response.DomainResp; -import com.tencent.supersonic.headless.api.pojo.response.ItemResp; -import com.tencent.supersonic.headless.api.pojo.response.ItemUseResp; -import com.tencent.supersonic.headless.api.pojo.response.MetricResp; -import com.tencent.supersonic.headless.api.pojo.response.ModelResp; -import com.tencent.supersonic.headless.api.pojo.response.ModelSchemaResp; -import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; +import com.tencent.supersonic.headless.api.pojo.response.*; import 
com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl; @@ -64,5 +56,4 @@ public interface SchemaService { ItemDateResp getItemDate(ItemDateFilter dimension, ItemDateFilter metric); - DatabaseResp getDatabase(Long id); } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/SchemaServiceImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/SchemaServiceImpl.java index 2c4a62bd5..950db495e 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/SchemaServiceImpl.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/SchemaServiceImpl.java @@ -536,8 +536,4 @@ public class SchemaServiceImpl implements SchemaService { return modelService.getItemDate(dimension, metric); } - @Override - public DatabaseResp getDatabase(Long id) { - return databaseService.getDatabase(id); - } } diff --git a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java index a0a8080da..2c4fe832d 100644 --- a/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java +++ b/headless/server/src/test/java/com/tencent/supersonic/headless/server/calcite/HeadlessParserServiceTest.java @@ -13,6 +13,7 @@ import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; @Slf4j @@ -155,8 +156,8 @@ class HeadlessParserServiceTest { // HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric)); OntologyQueryParam metricCommand = new OntologyQueryParam(); - metricCommand.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date"))); - metricCommand.setMetrics(new ArrayList<>(Arrays.asList("pv"))); + metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date"))); + metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv"))); metricCommand.setWhere( "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); metricCommand.setLimit(1000L); @@ -168,9 +169,9 @@ class HeadlessParserServiceTest { addDepartment(semanticSchema); OntologyQueryParam metricCommand2 = new OntologyQueryParam(); - metricCommand2.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date", + metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date", "user_name__department", "user_name", "user_name__page"))); - metricCommand2.setMetrics(new ArrayList<>(Arrays.asList("pv"))); + metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv"))); metricCommand2.setWhere( "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); metricCommand2.setLimit(1000L); diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java index 77bc0081b..f80458c31 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java @@ -383,9 +383,9 @@ public class S2VisitsDemo extends S2BaseDemo { metricReq.setDescription("访问的用户个数"); metricReq.setAlias("UV,访问人数"); 
MetricDefineByFieldParams metricTypeParams = new MetricDefineByFieldParams(); - metricTypeParams.setExpr("count(distinct user_id)"); + metricTypeParams.setExpr("count(distinct user_name)"); List fieldParams = new ArrayList<>(); - fieldParams.add(new FieldParam("user_id")); + fieldParams.add(new FieldParam("user_name")); metricTypeParams.setFields(fieldParams); metricReq.setMetricDefineByFieldParams(metricTypeParams); metricReq.setMetricDefineType(MetricDefineType.FIELD); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java index 7353dd526..c63732666 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java @@ -58,6 +58,7 @@ public class QueryByMetricTest extends BaseTest { @Test public void testWithMetricAndDimensionIds() throws Exception { + System.setProperty("s2.test", "true"); QueryMetricReq queryMetricReq = new QueryMetricReq(); queryMetricReq.setDomainId(1L); queryMetricReq.setMetricIds(Arrays.asList(1L, 3L)); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/TranslateTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/TranslateTest.java index 555711cc5..18d2880bb 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/TranslateTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/TranslateTest.java @@ -18,7 +18,7 @@ public class TranslateTest extends BaseTest { public void testSqlExplain() throws Exception { String sql = "SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数PVUV统计 GROUP BY 部门 "; SemanticTranslateResp explain = semanticLayerService.translate( - QueryReqBuilder.buildS2SQLReq(sql, DataUtils.getMetricAgentView()), + QueryReqBuilder.buildS2SQLReq(sql, DataUtils.productDatasetId), User.getDefaultUser()); assertNotNull(explain); assertNotNull(explain.getQuerySQL()); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java b/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java index a882293c1..2292d73b5 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/util/DataUtils.java @@ -18,6 +18,11 @@ public class DataUtils { public static final Integer productAgentId = 1; public static final Integer companyAgentId = 2; public static final Integer singerAgentId = 3; + + public static final Long productDatasetId = 1L; + public static final Long companyDatasetId = 2L; + public static final Long singerDatasettId = 3L; + public static final Integer ONE_TURNS_CHAT_ID = 10; private static final User user_test = User.getDefaultUser(); @@ -92,7 +97,4 @@ public class DataUtils { return result; } - public static Long getMetricAgentView() { - return 1L; - } } From 2eca2d1c14bbcbbeac89564e5c17b84a0eada1d2 Mon Sep 17 00:00:00 2001 From: tristanliu Date: Tue, 26 Nov 2024 20:28:24 +0800 Subject: [PATCH 18/88] [improvement][headless-fe] Revised the interaction for semantic modeling routing and implemented the initial version of metric management switching. 
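The semantic-model pages now hang off a shared layout route: the new
OverviewContainer renders the domain tree and collapse toggle once, and
the currently matched page (domain overview, model manager, metric edit)
is mounted through an <Outlet />. For readers unfamiliar with umi-style
nested routing, here is a minimal, self-contained sketch of the pattern;
the paths and component names are hypothetical and the real route table
is in the routes.ts diff below.

    // Illustrative sketch only -- not part of this patch.
    import React from 'react';
    import { Outlet, useParams } from '@umijs/max';

    // Child page: mounted by the router inside the layout's <Outlet />.
    // Its own route path (e.g. '/example/domain/:domainId/:menuKey')
    // supplies the params it reads here.
    export const ExampleDomainPage: React.FC = () => {
      const { domainId, menuKey } = useParams();
      return <div>domain {domainId}, tab {menuKey}</div>;
    };

    // Parent layout: stays mounted while child routes under it change,
    // so the shared sidebar is built only once per visit.
    const ExampleLayout: React.FC = () => (
      <div className="example-layout">
        <aside>{/* shared domain tree / collapse toggle lives here */}</aside>
        <main>
          <Outlet /> {/* the matched child page renders here */}
        </main>
      </div>
    );

    export default ExampleLayout;

This is presumably why DomainManager and ModelManager shrink to plain tab
containers in this patch: switching between /model/domain/... and
/model/domain/manager/... swaps only the <Outlet /> content instead of
rebuilding the whole page.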
--- .../packages/supersonic-fe/config/routes.ts | 82 +++- .../src/pages/SemanticModel/DomainManager.tsx | 87 +--- .../pages/SemanticModel/Insights/Market.tsx | 2 +- .../src/pages/SemanticModel/Metric/Edit.tsx | 12 +- .../components/MetricInfoCreateForm.tsx | 2 +- .../src/pages/SemanticModel/ModelManager.tsx | 30 +- .../pages/SemanticModel/OverviewContainer.tsx | 128 ++++++ .../SemanticModel/OverviewContainer1.tsx | 215 ---------- .../SemanticModel/OverviewContainerRight.tsx | 62 --- .../pages/SemanticModel/PageBreadcrumb.tsx | 78 ++++ .../View/components/DataSetTable.tsx | 7 +- .../components/ClassMetricTable.tsx | 14 +- .../SemanticModel/components/DomainList.tsx | 111 +++-- .../components/DomainManagerTab.tsx | 28 +- .../components/MetricInfoCreateForm.tsx | 2 +- .../components/ModelManagerTab.tsx | 18 +- .../SemanticModel/components/ModelTable.tsx | 2 +- .../components/TableColumnRender.tsx | 393 +++++++----------- .../pages/SemanticModel/components/style.less | 19 +- .../src/pages/SemanticModel/index.tsx | 162 ++------ .../pages/SemanticModel/models/metricData.ts | 3 + .../src/pages/SemanticModel/service.ts | 2 +- .../packages/supersonic-fe/src/utils/utils.ts | 7 + 23 files changed, 571 insertions(+), 895 deletions(-) create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx delete mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer1.tsx delete mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainerRight.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx diff --git a/webapp/packages/supersonic-fe/config/routes.ts b/webapp/packages/supersonic-fe/config/routes.ts index 946e72eca..04a8413d8 100644 --- a/webapp/packages/supersonic-fe/config/routes.ts +++ b/webapp/packages/supersonic-fe/config/routes.ts @@ -62,27 +62,85 @@ const ROUTES = [ envEnableList: [ENV_KEY.SEMANTIC], routes: [ { - path: '/model/:domainId', - component: './SemanticModel/DomainManager', - envEnableList: [ENV_KEY.SEMANTIC], + path: '/model/', + redirect: '/model/domain', + }, + { + path: '/model/domain/', + component: './SemanticModel/OverviewContainer', routes: [ { - path: '/model/:domainId/:menuKey', + path: '/model/domain/:domainId', component: './SemanticModel/DomainManager', + routes: [ + { + path: '/model/domain/:domainId/:menuKey', + component: './SemanticModel/DomainManager', + }, + ], + }, + { + path: '/model/domain/manager/:domainId/:modelId', + component: './SemanticModel/ModelManager', + routes: [ + { + path: '/model/domain/manager/:domainId/:modelId/:menuKey', + component: './SemanticModel/ModelManager', + }, + ], }, ], }, { - path: '/model/manager/:domainId/:modelId', - component: './SemanticModel/ModelManager', + path: '/model/metric/:domainId/:modelId/:metricId', + component: './SemanticModel/Metric/Edit', envEnableList: [ENV_KEY.SEMANTIC], - routes: [ - { - path: '/model/manager/:domainId/:modelId/:menuKey', - component: './SemanticModel/ModelManager', - }, - ], + // routes: [ + // { + // path: '/model/manager/:domainId/:modelId/:menuKey', + // component: './SemanticModel/ModelManager', + // }, + // ], }, + // { + // path: '/model/manager/', + // component: './SemanticModel/OverviewContainer', + // routes: [ + // { + // path: '/model/manager/:domainId/:modelId', + // component: './SemanticModel/ModelManager', + // routes: [ + // { + // path: '/model/manager/:domainId/:modelId/:menuKey', + // component: './SemanticModel/ModelManager', + // }, + // ], + // }, + // 
], + // }, + // { + // path: '/model/:domainId', + // component: './SemanticModel/DomainManager', + // envEnableList: [ENV_KEY.SEMANTIC], + // routes: [ + // { + // path: '/model/:domainId/:menuKey', + // component: './SemanticModel/DomainManager', + // }, + // ], + // }, + // { + // path: '/model/manager/:domainId/:modelId', + // component: './SemanticModel/ModelManager', + // envEnableList: [ENV_KEY.SEMANTIC], + // routes: [ + // { + // path: '/model/manager/:domainId/:modelId/:menuKey', + // component: './SemanticModel/ModelManager', + // }, + // ], + // }, + // { // path: '/model/:domainId/:modelId/:menuKey', // component: './SemanticModel/DomainManager', diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx index 4ff772a5d..d278a710b 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx @@ -1,103 +1,26 @@ -import { message } from 'antd'; -import React, { useEffect, useState } from 'react'; +import React, { useState } from 'react'; import { history, useParams, useModel } from '@umijs/max'; -import { ISemantic } from './data'; -import { getDomainList, getDataSetList } from './service'; import DomainManagerTab from './components/DomainManagerTab'; -import { isArrayOfValues } from '@/utils/utils'; type Props = {}; const DomainManager: React.FC = ({}) => { const defaultTabKey = 'overview'; const params: any = useParams(); - const domainId = params.domainId; const domainModel = useModel('SemanticModel.domainData'); - const modelModel = useModel('SemanticModel.modelData'); - const databaseModel = useModel('SemanticModel.databaseData'); - const { selectDomainId, domainList, setSelectDomain, setDomainList } = domainModel; - const { selectModelId } = modelModel; - const { MrefreshDatabaseList } = databaseModel; + + const { selectDomainId } = domainModel; const menuKey = params.menuKey ? 
params.menuKey : defaultTabKey; - const [collapsedState, setCollapsedState] = useState(true); + const [activeKey, setActiveKey] = useState(menuKey); - const [dataSetList, setDataSetList] = useState([]); - - // const initSelectedDomain = (domainList: ISemantic.IDomainItem[]) => { - // const targetNode = domainList.filter((item: any) => { - // return `${item.id}` === domainId; - // })[0]; - // if (!targetNode) { - // const firstRootNode = domainList.filter((item: any) => { - // return item.parentId === 0; - // })[0]; - // if (firstRootNode) { - // const { id } = firstRootNode; - // setSelectDomain(firstRootNode); - // setActiveKey(menuKey); - // pushUrlMenu(id, 0, menuKey); - // } - // } else { - // setSelectDomain(targetNode); - // } - // }; - - // const initProjectTree = async () => { - // const { code, data, msg } = await getDomainList(); - // if (code === 200) { - // initSelectedDomain(data); - // setDomainList(data); - // } else { - // message.error(msg); - // } - // }; - - // useEffect(() => { - // initProjectTree(); - // MrefreshDatabaseList(); - // }, []); - - // useEffect(() => { - // if (!selectDomainId) { - // return; - // } - // // queryModelList(); - // queryDataSetList(); - // }, [selectDomainId]); - - // const queryDataSetList = async () => { - // const { code, data, msg } = await getDataSetList(selectDomainId); - // if (code === 200) { - // setDataSetList(data); - // if (!isArrayOfValues(data)) { - // setActiveKey(defaultTabKey); - // } - // } else { - // message.error(msg); - // } - // }; const pushUrlMenu = (domainId: number, menuKey: string) => { - history.push(`/model/${domainId}/${menuKey}`); + history.push(`/model/domain/${domainId}/${menuKey}`); }; - const cleanModelInfo = (domainId) => { - setActiveKey(defaultTabKey); - pushUrlMenu(domainId, defaultTabKey); - // setSelectModel(undefined); - }; - - // const handleCollapsedBtn = () => { - // setCollapsedState(!collapsedState); - // }; - return ( { - cleanModelInfo(selectDomainId); - }} onMenuChange={(menuKey) => { setActiveKey(menuKey); pushUrlMenu(selectDomainId, menuKey); diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/Market.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/Market.tsx index bcb2df739..b2c931391 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/Market.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/Market.tsx @@ -155,7 +155,7 @@ const ClassMetricTable: React.FC = ({}) => { const columnsConfig = ColumnsConfig({ indicatorInfo: { - url: '/tag/detail/', + url: '/tag/detail/:indicatorId', starType: 'tag', }, }); diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit.tsx index abcf8432d..e7bec7836 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit.tsx @@ -1,7 +1,7 @@ import { message } from 'antd'; import React, { useState, useEffect } from 'react'; import { getMetricData } from '../service'; -import { useParams } from '@umijs/max'; +import { useParams, useModel } from '@umijs/max'; import styles from './style.less'; import { ISemantic } from '../data'; import MetricInfoEditSider from './MetricInfoEditSider'; @@ -14,7 +14,8 @@ const MetricDetail: React.FC = () => { const params: any = useParams(); const metricId = params.metricId; const [metircData, setMetircData] = useState(); - + const metricModel = 
useModel('SemanticModel.metricData'); + const { selectMetric, setSelectMetric } = metricModel; const [settingKey, setSettingKey] = useState(MetricSettingKey.BASIC); useEffect(() => { @@ -24,10 +25,17 @@ const MetricDetail: React.FC = () => { queryMetricData(metricId); }, [metricId]); + useEffect(() => { + return () => { + setSelectMetric(undefined); + }; + }, []); + const queryMetricData = async (metricId: string) => { const { code, data, msg } = await getMetricData(metricId); if (code === 200) { setMetircData({ ...data }); + setSelectMetric({ ...data }); return; } message.error(msg); diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx index 2efcc3291..217b25e05 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx @@ -905,7 +905,7 @@ const MetricInfoCreateForm: React.FC = ({ key="console" onClick={() => { history.replace( - `/model/manager/${domainId}/${modelId || metricItem?.modelId}/dataSource`, + `/model/domain/manager/${domainId}/${modelId || metricItem?.modelId}/dataSource`, ); onCancel?.(); }} diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx index 9ec4abd73..1cf463814 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx @@ -4,27 +4,18 @@ import ModelManagerTab from './components/ModelManagerTab'; type Props = {}; -const OverviewContainer: React.FC = ({}) => { +const ModelManager: React.FC = ({}) => { const defaultTabKey = 'overview'; const params: any = useParams(); - const domainId = params.domainId; const modelId = params.modelId; const domainModel = useModel('SemanticModel.domainData'); const modelModel = useModel('SemanticModel.modelData'); const dimensionModel = useModel('SemanticModel.dimensionData'); const metricModel = useModel('SemanticModel.metricData'); - const databaseModel = useModel('SemanticModel.databaseData'); - const { selectDomainId, domainList, setSelectDomain, setDomainList } = domainModel; - const { - selectModelId, - modelList, - MrefreshModelList, - setSelectModel, - setModelTableHistoryParams, - } = modelModel; + const { selectDomainId } = domainModel; + const { selectModelId, modelList } = modelModel; const { MrefreshDimensionList } = dimensionModel; const { MrefreshMetricList } = metricModel; - const { MrefreshDatabaseList } = databaseModel; const menuKey = params.menuKey ? params.menuKey : !Number(modelId) ? 
defaultTabKey : ''; const [activeKey, setActiveKey] = useState(menuKey); @@ -35,7 +26,7 @@ const OverviewContainer: React.FC = ({}) => { }; useEffect(() => { - if (!selectModelId) { + if (!selectModelId || `${selectModelId}` === `${modelId}`) { return; } initModelConfig(); @@ -44,22 +35,13 @@ const OverviewContainer: React.FC = ({}) => { }, [selectModelId]); const pushUrlMenu = (domainId: number, modelId: number, menuKey: string) => { - history.push(`/model/manager/${domainId}/${modelId}/${menuKey}`); - }; - - const cleanModelInfo = (domainId) => { - setActiveKey(defaultTabKey); - pushUrlMenu(domainId, 0, defaultTabKey); - setSelectModel(undefined); + history.push(`/model/domain/manager/${domainId}/${modelId}/${menuKey}`); }; return ( { - cleanModelInfo(selectDomainId); - }} onMenuChange={(menuKey) => { setActiveKey(menuKey); pushUrlMenu(selectDomainId, selectModelId, menuKey); @@ -68,4 +50,4 @@ const OverviewContainer: React.FC = ({}) => { ); }; -export default OverviewContainer; +export default ModelManager; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx new file mode 100644 index 000000000..e99c011ef --- /dev/null +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx @@ -0,0 +1,128 @@ +import React, { useEffect, useState } from 'react'; +import { history, useParams, useModel, Outlet } from '@umijs/max'; +import DomainListTree from './components/DomainList'; +import styles from './components/style.less'; +import { LeftOutlined, RightOutlined } from '@ant-design/icons'; +import { ISemantic } from './data'; + +type Props = {}; + +const OverviewContainer: React.FC = ({}) => { + const defaultTabKey = 'overview'; + const params: any = useParams(); + const domainId = params.domainId; + const modelId = params.modelId; + const domainModel = useModel('SemanticModel.domainData'); + const modelModel = useModel('SemanticModel.modelData'); + const databaseModel = useModel('SemanticModel.databaseData'); + const { setSelectDomain, setDomainList, selectDomainId } = domainModel; + const { setSelectModel, setModelTableHistoryParams, MrefreshModelList } = modelModel; + const { MrefreshDatabaseList } = databaseModel; + const menuKey = params.menuKey ? params.menuKey : !Number(modelId) ? 
defaultTabKey : ''; + const [collapsedState, setCollapsedState] = useState(true); + + useEffect(() => { + if (!selectDomainId || `${domainId}` === `${selectDomainId}`) { + return; + } + pushUrlMenu(selectDomainId, menuKey); + }, [selectDomainId]); + + // const initSelectedDomain = (domainList: ISemantic.IDomainItem[]) => { + // const targetNode = domainList.filter((item: any) => { + // return `${item.id}` === domainId; + // })[0]; + // if (!targetNode) { + // const firstRootNode = domainList.filter((item: any) => { + // return item.parentId === 0; + // })[0]; + // if (firstRootNode) { + // const { id } = firstRootNode; + // setSelectDomain(firstRootNode); + // pushUrlMenu(id, menuKey); + // } + // } else { + // setSelectDomain(targetNode); + // } + // }; + + // const initProjectTree = async () => { + // const { code, data, msg } = await getDomainList(); + // if (code === 200) { + // initSelectedDomain(data); + // setDomainList(data); + // } else { + // message.error(msg); + // } + // }; + + // useEffect(() => { + // initProjectTree(); + // MrefreshDatabaseList(); + // return () => { + // setSelectDomain(undefined); + // }; + // }, []); + + const pushUrlMenu = (domainId: number, menuKey: string) => { + history.push(`/model/domain/${domainId}/${menuKey}`); + }; + + const cleanModelInfo = (domainId) => { + pushUrlMenu(domainId, defaultTabKey); + setSelectModel(undefined); + }; + + const handleCollapsedBtn = () => { + setCollapsedState(!collapsedState); + }; + + useEffect(() => { + if (!selectDomainId) { + return; + } + queryModelList(); + }, [selectDomainId]); + + const queryModelList = async () => { + await MrefreshModelList(selectDomainId); + }; + + return ( +
+
+
+
+ { + const { id } = domainData; + cleanModelInfo(id); + setSelectDomain(domainData); + setModelTableHistoryParams({ + [id]: {}, + }); + }} + // onTreeDataUpdate={() => { + // // initProjectTree(); + // }} + /> +
+ +
{ + handleCollapsedBtn(); + }} + > + {collapsedState ? : } +
+
+
+ +
+
+
+ ); +}; + +export default OverviewContainer; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer1.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer1.tsx deleted file mode 100644 index 63534cd48..000000000 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer1.tsx +++ /dev/null @@ -1,215 +0,0 @@ -import { message } from 'antd'; -import React, { useEffect, useState } from 'react'; -import { history, useParams, useModel } from '@umijs/max'; -import DomainListTree from './components/DomainList'; -import styles from './components/style.less'; -import { LeftOutlined, RightOutlined } from '@ant-design/icons'; -import { ISemantic } from './data'; -import { getDomainList, getDataSetList } from './service'; -import DomainManagerTab from './components/DomainManagerTab'; -import { isArrayOfValues } from '@/utils/utils'; -import OverviewContainerRight from './components/OverviewContainerRight'; - -type Props = { - mode: 'domain'; -}; - -const OverviewContainer: React.FC = ({ mode = 'domain' }) => { - const defaultTabKey = 'overview'; - const params: any = useParams(); - const domainId = params.domainId; - const modelId = params.modelId; - const domainModel = useModel('SemanticModel.domainData'); - const modelModel = useModel('SemanticModel.modelData'); - const dimensionModel = useModel('SemanticModel.dimensionData'); - const metricModel = useModel('SemanticModel.metricData'); - const databaseModel = useModel('SemanticModel.databaseData'); - const { selectDomainId, domainList, setSelectDomain, setDomainList } = domainModel; - const { - selectModelId, - modelList, - MrefreshModelList, - setSelectModel, - setModelTableHistoryParams, - } = modelModel; - const { MrefreshDimensionList } = dimensionModel; - const { MrefreshMetricList } = metricModel; - const { MrefreshDatabaseList } = databaseModel; - const menuKey = params.menuKey ? params.menuKey : !Number(modelId) ? 
defaultTabKey : ''; - const [isModel, setIsModel] = useState(false); - const [collapsedState, setCollapsedState] = useState(true); - const [activeKey, setActiveKey] = useState(menuKey); - const [dataSetList, setDataSetList] = useState([]); - - const initSelectedDomain = (domainList: ISemantic.IDomainItem[]) => { - const targetNode = domainList.filter((item: any) => { - return `${item.id}` === domainId; - })[0]; - if (!targetNode) { - const firstRootNode = domainList.filter((item: any) => { - return item.parentId === 0; - })[0]; - if (firstRootNode) { - const { id } = firstRootNode; - setSelectDomain(firstRootNode); - setActiveKey(menuKey); - pushUrlMenu(id, 0, menuKey); - } - } else { - setSelectDomain(targetNode); - } - }; - - const initProjectTree = async () => { - const { code, data, msg } = await getDomainList(); - if (code === 200) { - initSelectedDomain(data); - setDomainList(data); - } else { - message.error(msg); - } - }; - - useEffect(() => { - initProjectTree(); - MrefreshDatabaseList(); - return () => { - setSelectDomain(undefined); - setSelectModel(undefined); - }; - }, []); - - useEffect(() => { - if (!selectDomainId) { - return; - } - queryModelList(); - queryDataSetList(); - }, [selectDomainId]); - - const queryDataSetList = async () => { - const { code, data, msg } = await getDataSetList(selectDomainId); - if (code === 200) { - setDataSetList(data); - if (!isArrayOfValues(data)) { - setActiveKey(defaultTabKey); - } - } else { - message.error(msg); - } - }; - - const queryModelList = async () => { - await MrefreshModelList(selectDomainId); - }; - - useEffect(() => { - if (!selectDomainId) { - return; - } - setIsModel(false); - }, [domainList, selectDomainId]); - - const initModelConfig = () => { - setIsModel(true); - const currentMenuKey = menuKey === defaultTabKey ? '' : menuKey; - pushUrlMenu(selectDomainId, selectModelId, currentMenuKey); - setActiveKey(currentMenuKey); - }; - - useEffect(() => { - if (!selectModelId) { - return; - } - initModelConfig(); - MrefreshDimensionList({ modelId: selectModelId }); - MrefreshMetricList({ modelId: selectModelId }); - }, [selectModelId]); - - const pushUrlMenu = (domainId: number, modelId: number, menuKey: string) => { - history.push(`/model/${domainId}/${modelId || 0}/${menuKey}`); - }; - - const handleModelChange = (model?: ISemantic.IModelItem) => { - if (!model) { - return; - } - if (`${model.id}` === `${selectModelId}`) { - initModelConfig(); - } - setSelectModel(model); - }; - - const cleanModelInfo = (domainId) => { - setIsModel(false); - setActiveKey(defaultTabKey); - pushUrlMenu(domainId, 0, defaultTabKey); - setSelectModel(undefined); - }; - - const handleCollapsedBtn = () => { - setCollapsedState(!collapsedState); - }; - - return ( -
-
-
-
- { - const { id } = domainData; - cleanModelInfo(id); - setSelectDomain(domainData); - setModelTableHistoryParams({ - [id]: {}, - }); - }} - onTreeDataUpdate={() => { - initProjectTree(); - }} - /> -
- -
{ - handleCollapsedBtn(); - }} - > - {collapsedState ? : } -
-
-
- {selectDomainId ? ( - <> - - { - handleModelChange(model); - MrefreshModelList(selectDomainId); - }} - onBackDomainBtnClick={() => { - cleanModelInfo(selectDomainId); - }} - onMenuChange={(menuKey) => { - setActiveKey(menuKey); - pushUrlMenu(selectDomainId, selectModelId, menuKey); - }} - /> - - ) : ( -

请选择项目

- )} -
-
-
- ); -}; - -export default OverviewContainer; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainerRight.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainerRight.tsx deleted file mode 100644 index c4d3430a6..000000000 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainerRight.tsx +++ /dev/null @@ -1,62 +0,0 @@ -import { Outlet } from '@umijs/max'; -import { Tabs, Breadcrumb, Space, Radio } from 'antd'; -import React, { useRef, useEffect, useState } from 'react'; -import { history, useModel } from '@umijs/max'; -import { HomeOutlined, FundViewOutlined } from '@ant-design/icons'; -import styles from './components/style.less'; - -const OverviewContainerRight: React.FC = () => { - const domainModel = useModel('SemanticModel.domainData'); - const modelModel = useModel('SemanticModel.modelData'); - - const { selectDomainId, selectDomainName, selectDomain: domainData } = domainModel; - const { selectModelId, selectModelName, setSelectModel } = modelModel; - - return ( - <> - { - // onBackDomainBtnClick?.(); - setSelectModel(undefined); - history.push(`/model/${selectDomainId}/overview`); - }} - style={ - selectModelName ? { cursor: 'pointer' } : { color: '#296df3', fontWeight: 'bold' } - } - > - - {selectDomainName} - - ), - }, - { - type: 'separator', - separator: selectModelName ? '/' : '', - }, - { - title: selectModelName ? ( - { - history.push(`/model/manager/${selectDomainId}/${selectModelId}/`); - }} - style={{ color: '#296df3' }} - > - - {selectModelName} - - ) : undefined, - }, - ]} - /> - - - ); -}; - -export default OverviewContainerRight; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx new file mode 100644 index 000000000..8c3efdc37 --- /dev/null +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx @@ -0,0 +1,78 @@ +import { Outlet } from '@umijs/max'; +import { Tabs, Breadcrumb, Space, Radio } from 'antd'; +import React, { useRef, useEffect, useState } from 'react'; +import { history, useModel } from '@umijs/max'; +import { HomeOutlined, FundViewOutlined } from '@ant-design/icons'; +import styles from './components/style.less'; + +const PageBreadcrumb: React.FC = () => { + const domainModel = useModel('SemanticModel.domainData'); + const modelModel = useModel('SemanticModel.modelData'); + const metricModel = useModel('SemanticModel.metricData'); + const { selectDomainId, selectDomainName, selectDomain: domainData } = domainModel; + const { selectModelId, selectModelName, setSelectModel } = modelModel; + + const { selectMetric, setSelectMetric } = metricModel; + + const items = [ + { + title: ( + { + setSelectModel(undefined); + history.push(`/model/domain/${selectDomainId}/overview`); + }} + > + + {selectDomainName} + + ), + }, + ]; + + if (selectModelName) { + items.push( + { + type: 'separator', + separator: '/', + }, + { + title: ( + { + setSelectMetric(undefined); + history.push(`/model/domain/manager/${selectDomainId}/${selectModelId}/`); + }} + > + + {selectModelName} + + ), + }, + ); + } + + if (selectMetric?.name) { + items.push( + { + type: 'separator', + separator: '/', + }, + { + title: selectMetric?.name ? 
( + + + {selectMetric.name} + + ) : undefined, + }, + ); + } + return ( + <> + + + ); +}; + +export default PageBreadcrumb; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx index 2fea630fd..d75534090 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx @@ -45,11 +45,10 @@ const DataSetTable: React.FC = ({ disabledEdit = false }) => { const [viewList, setViewList] = useState(); - // useEffect(() => { - // setViewList(dataSetList); - // }, [dataSetList]); - useEffect(() => { + if (!selectDomainId) { + return; + } queryDataSetList(); queryDomainAllModel(); }, [selectDomainId]); diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx index 9e0958a2a..3a22ba7ec 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ClassMetricTable.tsx @@ -3,7 +3,7 @@ import { ProTable } from '@ant-design/pro-components'; import { message, Button, Space, Popconfirm, Input, Select, Tag } from 'antd'; import React, { useRef, useState, useEffect } from 'react'; import { StatusEnum, SemanticNodeType } from '../enum'; -import { useModel } from '@umijs/max'; +import { useModel, history } from '@umijs/max'; import { SENSITIVE_LEVEL_ENUM, SENSITIVE_LEVEL_OPTIONS, TAG_DEFINE_TYPE } from '../constant'; import { queryMetric, @@ -32,7 +32,7 @@ const ClassMetricTable: React.FC = ({ onEmptyMetricData }) => { const metricModel = useModel('SemanticModel.metricData'); const { selectDomainId } = domainModel; const { selectModelId: modelId } = modelModel; - const { MrefreshMetricList } = metricModel; + const { MrefreshMetricList, selectMetric, setSelectMetric } = metricModel; const [batchSensitiveLevelOpenState, setBatchSensitiveLevelOpenState] = useState(false); const [createModalVisible, setCreateModalVisible] = useState(false); const [metricItem, setMetricItem] = useState(); @@ -144,7 +144,10 @@ const ClassMetricTable: React.FC = ({ onEmptyMetricData }) => { const columnsConfig = ColumnsConfig({ indicatorInfo: { - url: '/model/metric/edit/', + url: '/model/metric/:domainId/:modelId/:indicatorId', + onNameClick: (record: ISemantic.IMetricItem) => { + setSelectMetric(record); + }, }, }); @@ -240,8 +243,9 @@ const ClassMetricTable: React.FC = ({ onEmptyMetricData }) => { type="link" key="metricEditBtn" onClick={() => { - setMetricItem(record); - setCreateModalVisible(true); + history.push(`/model/metric/${record.domainId}/${record.modelId}/${record.id}`); + // setMetricItem(record); + // setCreateModalVisible(true); }} > 编辑 diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainList.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainList.tsx index d67bdfef4..59306b816 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainList.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainList.tsx @@ -1,14 +1,11 @@ -import { DownOutlined, PlusOutlined, EditOutlined, DeleteOutlined } from '@ant-design/icons'; -import { Input, message, Tree, Popconfirm, Tooltip, Row, Col, Button, Menu } from 'antd'; +import { 
PlusOutlined, EditOutlined, DeleteOutlined } from '@ant-design/icons'; +import { Input, message, Popconfirm, Tooltip, Row, Col, Button, Menu } from 'antd'; import type { DataNode } from 'antd/lib/tree'; import { useEffect, useState } from 'react'; -import type { FC, Key } from 'react'; +import type { FC } from 'react'; import { useModel } from '@umijs/max'; import { createDomain, updateDomain, deleteDomain } from '../service'; -import { treeParentKeyLists } from '../utils'; import DomainInfoForm from './DomainInfoForm'; -import { constructorClassTreeFromList, addPathInTreeData } from '../utils'; -import { AppstoreOutlined } from '@ant-design/icons'; import styles from './style.less'; import { ISemantic } from '../data'; @@ -42,20 +39,15 @@ const DomainListTree: FC = ({ onTreeSelected, onTreeDataUpdate, }) => { - const [projectTree, setProjectTree] = useState([]); const [projectInfoModalVisible, setProjectInfoModalVisible] = useState(false); const [domainInfoParams, setDomainInfoParams] = useState({}); const [filterValue, setFliterValue] = useState(''); - const [expandedKeys, setExpandedKeys] = useState([]); const [classList, setClassList] = useState([]); const domainModel = useModel('SemanticModel.domainData'); const { selectDomainId, domainList } = domainModel; useEffect(() => { - const treeData = addPathInTreeData(constructorClassTreeFromList(domainList)); - setProjectTree(treeData); setClassList(domainList); - setExpandedKeys(treeParentKeyLists(treeData)); }, [domainList]); const onSearch = (value: any) => { @@ -67,7 +59,7 @@ const DomainListTree: FC = ({ return; } const targetNodeData = classList.filter((item: any) => { - return item.id === selectedKeys; + return `${item.id}` === `${selectedKeys}`; })[0]; onTreeSelected?.(targetNodeData); }; @@ -84,20 +76,6 @@ const DomainListTree: FC = ({ } }; - // const createDefaultModelSet = async (domainId: number) => { - // const { code, msg } = await createDomain({ - // modelType: 'add', - // type: 'normal', - // parentId: domainId, - // name: '默认模型集', - // bizName: `defaultModelSet_${(Math.random() * 1000000).toFixed(0)}`, - // isUnique: 1, - // }); - // if (code !== 200) { - // message.error(msg); - // } - // }; - const domainSubmit = async (values: any) => { if (values.modelType === 'add') { const { code, data } = await createDomain(values); @@ -126,21 +104,19 @@ const DomainListTree: FC = ({ const { id, name, path, hasEditPermission, parentId, hasModel } = node as any; const type = parentId === 0 ? 'top' : 'normal'; return ( -
- { - handleSelect(id); - }} - > - {name} - +
{ + // handleSelect(id); + // }} + > + {name} {createDomainBtnVisible && hasEditPermission && ( {Array.isArray(path) && path.length < 2 && !hasModel && ( { + onClick={(e) => { setDomainInfoParams({ modelType: 'add', type: 'normal', @@ -148,19 +124,21 @@ const DomainListTree: FC = ({ parentName: name, }); setProjectInfoModalVisible(true); + e.stopPropagation(); }} /> )} { + onClick={(e) => { setDomainInfoParams({ modelType: 'edit', type, ...node, }); setProjectInfoModalVisible(true); + e.stopPropagation(); }} /> = ({ okText="是" cancelText="否" > - + { + e.stopPropagation(); + }} + /> )} @@ -180,14 +163,14 @@ const DomainListTree: FC = ({ ); }; - const projectRenderTree = filterValue ? projectTreeFlat(projectTree, filterValue) : projectTree; - - const handleExpand = (_expandedKeys: Key[]) => { - setExpandedKeys(_expandedKeys as string[]); - }; - const items = domainList - .filter((domain) => domain.parentId === 0) + .filter((domain) => { + if (filterValue) { + return domain.parentId === 0 && domain.name.includes(filterValue); + } else { + return domain.parentId === 0; + } + }) .map((domain: ISemantic.IDomainItem) => { return { key: domain.id, @@ -245,6 +228,12 @@ const DomainListTree: FC = ({ className={styles.search} placeholder="请输入名称搜索" onSearch={onSearch} + onChange={(e) => { + const value = e.target.value; + if (!value) { + setFliterValue(value); + } + }} /> {createDomainBtnVisible && ( @@ -265,25 +254,19 @@ const DomainListTree: FC = ({ )}
- - {/* } - defaultExpandAll={true} - treeData={projectRenderTree} - titleRender={titleRender} - /> */} + {selectDomainId && ( + { + handleSelect(info.key); + }} + /> + )} {projectInfoModalVisible && ( void; - onBackDomainBtnClick?: () => void; onMenuChange?: (menuKey: string) => void; }; -const DomainManagerTab: React.FC = ({ - activeKey, - // modelList, - dataSetList, - // handleModelChange, - onBackDomainBtnClick, - onMenuChange, -}) => { +const DomainManagerTab: React.FC = ({ activeKey, onMenuChange }) => { const initState = useRef(false); const defaultTabKey = 'metric'; const domainModel = useModel('SemanticModel.domainData'); const modelModel = useModel('SemanticModel.modelData'); - const { selectDomainId, selectDomainName, selectDomain: domainData } = domainModel; - const { selectModelId, modelList, selectModelName } = modelModel; + const { selectDomainId, selectDomain: domainData } = domainModel; + const { selectModelId, modelList } = modelModel; useEffect(() => { initState.current = false; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/MetricInfoCreateForm.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/MetricInfoCreateForm.tsx index c8f069fc4..559989e46 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/MetricInfoCreateForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/MetricInfoCreateForm.tsx @@ -901,7 +901,7 @@ const MetricInfoCreateForm: React.FC = ({ key="console" onClick={() => { history.replace( - `/model/manager/${domainId}/${modelId || metricItem?.modelId}/dataSource`, + `/model/domain/manager/${domainId}/${modelId || metricItem?.modelId}/dataSource`, ); onCancel?.(); }} diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx index dcd8749eb..be3d39d83 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelManagerTab.tsx @@ -18,28 +18,14 @@ import View from '../View'; type Props = { activeKey: string; modelList: ISemantic.IModelItem[]; - handleModelChange: (model?: ISemantic.IModelItem) => void; - onBackDomainBtnClick?: () => void; onMenuChange?: (menuKey: string) => void; }; -const ModelManagerTab: React.FC = ({ - activeKey, - modelList, - handleModelChange, - onBackDomainBtnClick, - onMenuChange, -}) => { +const ModelManagerTab: React.FC = ({ activeKey, onMenuChange }) => { const initState = useRef(false); const defaultTabKey = 'metric'; - const domainModel = useModel('SemanticModel.domainData'); const modelModel = useModel('SemanticModel.modelData'); - const { selectDomainId, selectDomainName, selectDomain: domainData } = domainModel; - const { selectModelId, selectModelName } = modelModel; - - useEffect(() => { - console.log(modelList, 'modelList'); - }, [modelList]); + const { selectModelId } = modelModel; useEffect(() => { initState.current = false; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx index ecd69002f..65df43670 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/ModelTable.tsx @@ -106,7 +106,7 @@ const ModelTable: React.FC = ({ modelList, disabledEdit = false, onModelC 
onClick={() => { setSelectModel(record); - history.push(`/model/manager/${domainId}/${id}`); + history.push(`/model/domain/manager/${domainId}/${id}`); // onModelChange?.(record); }} > diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx index c24b816c3..0a06443cc 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/TableColumnRender.tsx @@ -6,32 +6,113 @@ import { history } from '@umijs/max'; import { ISemantic } from '../data'; import { isString } from 'lodash'; import dayjs from 'dayjs'; -import { isArrayOfValues } from '@/utils/utils'; +import { isArrayOfValues, replaceRouteParams } from '@/utils/utils'; import styles from './style.less'; import IndicatorStar, { StarType } from '../components/IndicatorStar'; +interface IndicatorInfo { + url?: string; + starType?: StarType; + onNameClick?: (record: ISemantic.IMetricItem) => void | boolean; +} + +interface ColumnsConfigParams { + indicatorInfo?: IndicatorInfo; +} + const { Text, Paragraph } = Typography; -export const ColumnsConfig: any = (params?: { - indicatorInfo?: { - url?: string; - starType?: StarType; - onNameClick?: (record: ISemantic.IMetricItem) => void; - }; -}) => { +export const ColumnsConfig = (params?: ColumnsConfigParams) => { + const renderAliasAndClassifications = ( + alias: string | undefined, + classifications: string[] | undefined, + ) => ( +
+ + {alias && ( + + +
别名:
+ + + {isString(alias) && + alias.split(',').map((aliasName: string) => ( + + + {aliasName} + + + ))} + + +
+ )} + + {isArrayOfValues(classifications) && ( + + +
分类:
+ + + {classifications.map((tag: string) => ( + + + {tag} + + + ))} + + +
+ )} +
+
+ ); + return { description: { - render: (_, record: ISemantic.IMetricItem) => { - const { description } = record; - return ( - - {description} - - ); - }, + render: (_, record: ISemantic.IMetricItem) => ( + + {record.description} + + ), }, dimensionInfo: { render: (_, record: ISemantic.IDimensionItem) => { @@ -46,70 +127,26 @@ export const ColumnsConfig: any = (params?: {
{bizName}
- - {alias && ( -
- - {alias && ( - - -
别名:
- - - {isString(alias) && - alias.split(',').map((aliasName: string) => { - return ( - - - {aliasName} - - - ); - })} - - -
- )} -
-
- )} + {renderAliasAndClassifications(alias, undefined)} ); }, }, indicatorInfo: { render: (_, record: ISemantic.IMetricItem) => { - const { name, alias, bizName, classifications, id, isCollect } = record; + const { name, alias, bizName, classifications, id, isCollect, domainId, modelId } = record; + let url = `/metric/detail/`; let starType: StarType = 'metric'; - if (params) { - if (params?.indicatorInfo?.url) { - url = params.indicatorInfo.url; - } - if (params?.indicatorInfo?.starType) { - starType = params.indicatorInfo.starType; - } + if (params?.indicatorInfo) { + url = replaceRouteParams(params.indicatorInfo.url || '', { + domainId: `${domainId}`, + modelId: `${modelId}`, + indicatorId: `${id}`, + }); + starType = params.indicatorInfo.starType || 'metric'; } + return ( <>
@@ -119,205 +156,71 @@ export const ColumnsConfig: any = (params?: { style={{ fontWeight: 500 }} onClick={(event: any) => { if (params?.indicatorInfo?.onNameClick) { - params?.indicatorInfo?.onNameClick(record); - } else { - history.push(`${url}${id}`); + const state = params.indicatorInfo.onNameClick(record); + if (state === false) { + return; + } } + history.push(url); event.preventDefault(); event.stopPropagation(); }} - // href={`/webapp${url}${id}`} > {name} - {/* - {SENSITIVE_LEVEL_ENUM[sensitiveLevel]} - */}
{bizName}
- - {(alias || isArrayOfValues(classifications)) && ( -
- - {alias && ( - - -
别名:
- - - {isString(alias) && - alias.split(',').map((aliasName: string) => { - return ( - - - {aliasName} - - - ); - })} - - -
- )} - - {isArrayOfValues(classifications) && ( - - -
分类:
- - - {classifications.map((tag: string) => { - return ( - - - {tag} - - - ); - })} - - -
- )} - {/* - - - : - {id} - - - - : - {createdBy} - - */} -
-
- )} + {renderAliasAndClassifications(alias, classifications)} ); }, }, sensitiveLevel: { - render: (_, record: ISemantic.IMetricItem) => { - const { sensitiveLevel } = record; - return SENSITIVE_LEVEL_COLOR[sensitiveLevel] ? ( - - {SENSITIVE_LEVEL_ENUM[sensitiveLevel]} - - ) : ( - - 未知 - - ); - }, + render: (_, record: ISemantic.IMetricItem) => ( + + {SENSITIVE_LEVEL_ENUM[record.sensitiveLevel] || '未知'} + + ), }, state: { render: (status) => { - let tagProps: { color: string; label: string; style?: any } = { + const tagProps = { color: 'default', label: '未知', style: {}, }; switch (status) { case StatusEnum.ONLINE: - tagProps = { - // color: 'processing', - color: 'geekblue', - label: '已启用', - }; + tagProps.color = 'geekblue'; + tagProps.label = '已启用'; break; case StatusEnum.OFFLINE: - tagProps = { - color: 'default', - label: '未启用', - style: { - color: 'rgb(95, 116, 141)', - fontWeight: 400, - }, - }; + tagProps.color = 'default'; + tagProps.label = '未启用'; + tagProps.style = { color: 'rgb(95, 116, 141)', fontWeight: 400 }; break; case StatusEnum.INITIALIZED: - tagProps = { - color: 'processing', - label: '初始化', - }; + tagProps.color = 'processing'; + tagProps.label = '初始化'; break; case StatusEnum.DELETED: - tagProps = { - color: 'default', - label: '已删除', - }; + tagProps.color = 'default'; + tagProps.label = '已删除'; break; case StatusEnum.UNAVAILABLE: - tagProps = { - color: 'default', - label: '不可用', - }; + tagProps.color = 'default'; + tagProps.label = '不可用'; break; default: break; @@ -344,14 +247,12 @@ export const ColumnsConfig: any = (params?: { tooltip: '创建人/更新时间', width: 180, search: false, - render: (value: any, record: ISemantic.IMetricItem) => { - return ( - - {record.createdBy} - {value && value !== '-' ? dayjs(value).format('YYYY-MM-DD HH:mm:ss') : '-'} - - ); - }, + render: (value: any, record: ISemantic.IMetricItem) => ( + + {record.createdBy} + {value && value !== '-' ? 
dayjs(value).format('YYYY-MM-DD HH:mm:ss') : '-'} + + ), }, }; }; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/style.less b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/style.less index 96527bfae..e5e32db88 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/style.less +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/components/style.less @@ -6,6 +6,7 @@ .projectManger { + border-top: 1px solid #eee; width: 100%; min-height: calc(100vh - 56px); background-color: #fff; @@ -163,8 +164,8 @@ // } // } .tab { - border-top: 1px solid #eee; - margin-top: 10px; + + // margin-top: 10px; :global { .ant-tabs-tab-btn { font-size: 16px; @@ -338,12 +339,14 @@ .breadcrumb{ font-size: 18px; - margin: 17px 0 0 20px; - padding-bottom: 3px; + height: 48px; + line-height: 48px; + padding: 0 20px; :global { .ant-breadcrumb-link { height: 28px; color: #709bf1; + cursor: pointer; &:hover{ color: #296df3; } @@ -351,6 +354,14 @@ .anticon { font-size: 18px; } + li { + &:last-child { + .ant-breadcrumb-link { + color: #296df3; + } + + } + } } } diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx index 7120a1482..0390e30ae 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/index.tsx @@ -1,44 +1,28 @@ import { message } from 'antd'; import React, { useEffect, useState } from 'react'; -import { history, useParams, useModel } from '@umijs/max'; +import { history, useParams, useModel, Outlet } from '@umijs/max'; import DomainListTree from './components/DomainList'; import styles from './components/style.less'; import { LeftOutlined, RightOutlined } from '@ant-design/icons'; import { ISemantic } from './data'; -import { getDomainList, getDataSetList } from './service'; -import { isArrayOfValues } from '@/utils/utils'; -import OverviewContainerRight from './OverviewContainerRight'; +import { getDomainList, getDataSetList, getModelDetail } from './service'; +import PageBreadcrumb from './PageBreadcrumb'; -type Props = { - mode: 'domain'; -}; +type Props = {}; -const OverviewContainer: React.FC = ({ mode = 'domain' }) => { - const defaultTabKey = 'overview'; +const SemanticModel: React.FC = ({}) => { const params: any = useParams(); const domainId = params.domainId; const modelId = params.modelId; const domainModel = useModel('SemanticModel.domainData'); const modelModel = useModel('SemanticModel.modelData'); - const dimensionModel = useModel('SemanticModel.dimensionData'); - const metricModel = useModel('SemanticModel.metricData'); const databaseModel = useModel('SemanticModel.databaseData'); - const { selectDomainId, domainList, setSelectDomain, setDomainList } = domainModel; - const { - selectModelId, - modelList, - MrefreshModelList, - setSelectModel, - setModelTableHistoryParams, - } = modelModel; - const { MrefreshDimensionList } = dimensionModel; - const { MrefreshMetricList } = metricModel; + const metricModel = useModel('SemanticModel.metricData'); + const { setSelectDomain, setDomainList, selectDomainId } = domainModel; + const { selectModel, setSelectModel, setModelTableHistoryParams, MrefreshModelList } = modelModel; const { MrefreshDatabaseList } = databaseModel; - const menuKey = params.menuKey ? params.menuKey : !Number(modelId) ? 
defaultTabKey : ''; - const [collapsedState, setCollapsedState] = useState(true); - const [activeKey, setActiveKey] = useState(menuKey); - // const [dataSetList, setDataSetList] = useState([]); + const { selectMetric, setSelectMetric } = metricModel; const initSelectedDomain = (domainList: ISemantic.IDomainItem[]) => { const targetNode = domainList.filter((item: any) => { @@ -51,8 +35,7 @@ const OverviewContainer: React.FC = ({ mode = 'domain' }) => { if (firstRootNode) { const { id } = firstRootNode; setSelectDomain(firstRootNode); - setActiveKey(menuKey); - pushUrlMenu(id, 0, menuKey); + // pushUrlMenu(id, menuKey); } } else { setSelectDomain(targetNode); @@ -69,121 +52,38 @@ const OverviewContainer: React.FC = ({ mode = 'domain' }) => { } }; + const initModelData = async () => { + const { code, data, msg } = await getModelDetail({ modelId }); + if (code === 200) { + setSelectModel(data); + } else { + message.error(msg); + } + }; + useEffect(() => { initProjectTree(); MrefreshDatabaseList(); + if (modelId && modelId !== selectModel) { + initModelData(); + } + return () => { setSelectDomain(undefined); - // setSelectModel(undefined); }; }, []); - useEffect(() => { - if (!selectDomainId) { - return; - } - console.log(selectDomainId, 'selectDomainIdselectDomainId'); - queryModelList(); - // queryDataSetList(); - }, [selectDomainId]); - - // const queryDataSetList = async () => { - // const { code, data, msg } = await getDataSetList(selectDomainId); - // if (code === 200) { - // setDataSetList(data); - // if (!isArrayOfValues(data)) { - // setActiveKey(defaultTabKey); - // } - // } else { - // message.error(msg); - // } - // }; - - const queryModelList = async () => { - await MrefreshModelList(selectDomainId); - }; - - // const initModelConfig = () => { - // const currentMenuKey = menuKey === defaultTabKey ? '' : menuKey; - // pushUrlMenu(selectDomainId, selectModelId, currentMenuKey); - // setActiveKey(currentMenuKey); - // }; - - // useEffect(() => { - // if (!selectModelId) { - // return; - // } - // // initModelConfig(); - // MrefreshDimensionList({ modelId: selectModelId }); - // MrefreshMetricList({ modelId: selectModelId }); - // }, [selectModelId]); - - const pushUrlMenu = (domainId: number, modelId: number, menuKey: string) => { - history.push(`/model/${domainId}/${menuKey}`); - }; - - // // const handleModelChange = (model?: ISemantic.IModelItem) => { - // // if (!model) { - // // return; - // // } - // // if (`${model.id}` === `${selectModelId}`) { - // // initModelConfig(); - // // } - // // setSelectModel(model); - // // }; - - const cleanModelInfo = (domainId) => { - setActiveKey(defaultTabKey); - pushUrlMenu(domainId, 0, defaultTabKey); - setSelectModel(undefined); - }; - - const handleCollapsedBtn = () => { - setCollapsedState(!collapsedState); - }; - return ( -
-
-
-
- { - const { id } = domainData; - cleanModelInfo(id); - setSelectDomain(domainData); - setModelTableHistoryParams({ - [id]: {}, - }); - }} - onTreeDataUpdate={() => { - initProjectTree(); - }} - /> -
- -
{ - handleCollapsedBtn(); - }} - > - {collapsedState ? : } -
-
-
- {selectDomainId ? ( - <> - - - ) : ( -

请选择项目

- )} -
+
+
+ +
+
+ + {/* */}
); }; -export default OverviewContainer; +export default SemanticModel; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/models/metricData.ts b/webapp/packages/supersonic-fe/src/pages/SemanticModel/models/metricData.ts index caab8b12e..29123730c 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/models/metricData.ts +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/models/metricData.ts @@ -5,6 +5,7 @@ import { queryMetric } from '../service'; export default function Metric() { const [metricList, setMetricList] = useState([]); + const [selectMetric, setSelectMetric] = useState(); const queryMetricList = async (params: any) => { const { code, data, msg } = await queryMetric({ @@ -25,6 +26,8 @@ export default function Metric() { return { MmetricList: metricList, + setSelectMetric: setSelectMetric, + selectMetric: selectMetric, MrefreshMetricList: refreshMetricList, MqueryMetricList: queryMetricList, }; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/service.ts b/webapp/packages/supersonic-fe/src/pages/SemanticModel/service.ts index 230ce74dc..c36de3f6c 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/service.ts +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/service.ts @@ -453,7 +453,7 @@ export function getUnAvailableItem(data: any): Promise { export function getModelDetail(data: any): Promise { if (!data.modelId) { - return; + return {}; } return request.get(`${process.env.API_BASE_URL}model/getModel/${data.modelId}`); } diff --git a/webapp/packages/supersonic-fe/src/utils/utils.ts b/webapp/packages/supersonic-fe/src/utils/utils.ts index bf48da88f..e327acc85 100644 --- a/webapp/packages/supersonic-fe/src/utils/utils.ts +++ b/webapp/packages/supersonic-fe/src/utils/utils.ts @@ -502,3 +502,10 @@ export function decryptPassword(encryptPassword: string) { export function uniqueArray(arr: any[]) { return Array.from(new Set(arr)); } + +// 替换以:开头标记的变量 +export const replaceRouteParams = (template: string, values: Record): string => { + return template.replace(/:([a-zA-Z0-9_]+)/g, (match, key) => { + return values[key] !== undefined ? values[key] : match; + }); +}; From 9bccbae3bc8988b104b9871a502b58a78ec1e706 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Tue, 26 Nov 2024 22:22:14 +0800 Subject: [PATCH 19/88] [improvement][launcher]Set system property `s2.test` in junit tests in order to facilitate conditional breakpoints. 
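
junit-pioneer's @SetSystemProperty scopes the property to the annotated test
method and restores the previous value afterwards, which is what makes it
usable as a guard for IDE conditional breakpoints. A minimal sketch of the
pattern (the test class name and the breakpoint expression below are
illustrative only and are not part of the diff that follows):

    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;
    import org.junitpioneer.jupiter.SetSystemProperty;

    // Hypothetical example, assuming junit-pioneer is on the test classpath.
    class ConditionalBreakpointSketchTest {

        @Test
        @SetSystemProperty(key = "s2.test", value = "true")
        void runsWithTestFlag() {
            // An IDE conditional breakpoint can use the expression
            //   "true".equals(System.getProperty("s2.test"))
            // so it only fires while a test annotated like this is running;
            // junit-pioneer clears the property once the method returns.
            Assertions.assertTrue(Boolean.getBoolean("s2.test"));
        }
    }
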
--- launchers/standalone/pom.xml | 6 ++++++ .../test/java/com/tencent/supersonic/chat/MetricTest.java | 3 ++- .../com/tencent/supersonic/headless/QueryByMetricTest.java | 2 -- .../com/tencent/supersonic/headless/QueryByStructTest.java | 2 -- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/launchers/standalone/pom.xml b/launchers/standalone/pom.xml index 20a3a8b0a..885bbe67d 100644 --- a/launchers/standalone/pom.xml +++ b/launchers/standalone/pom.xml @@ -83,6 +83,12 @@ junit test + + org.junit-pioneer + junit-pioneer + 1.6.0 + test + org.springframework.boot spring-boot-starter-test diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java index 2d809bb70..c78959c89 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java @@ -14,6 +14,7 @@ import com.tencent.supersonic.headless.chat.query.rule.metric.MetricTopNQuery; import com.tencent.supersonic.util.DataUtils; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.SetSystemProperty; import org.springframework.boot.test.context.SpringBootTest; import java.text.DateFormat; @@ -72,8 +73,8 @@ public class MetricTest extends BaseTest { } @Test + @SetSystemProperty(key = "s2.test", value = "true") public void testMetricGroupBy() throws Exception { - System.setProperty("s2.test", "true"); QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java index c63732666..b4e3dd653 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java @@ -20,7 +20,6 @@ public class QueryByMetricTest extends BaseTest { @Test public void testWithMetricAndDimensionBizNames() throws Exception { - System.setProperty("s2.test", "true"); QueryMetricReq queryMetricReq = new QueryMetricReq(); queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv")); queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department")); @@ -58,7 +57,6 @@ public class QueryByMetricTest extends BaseTest { @Test public void testWithMetricAndDimensionIds() throws Exception { - System.setProperty("s2.test", "true"); QueryMetricReq queryMetricReq = new QueryMetricReq(); queryMetricReq.setDomainId(1L); queryMetricReq.setMetricIds(Arrays.asList(1L, 3L)); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java index 43a50e590..7e1b3a37d 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByStructTest.java @@ -46,7 +46,6 @@ public class QueryByStructTest extends BaseTest { @Test public void testDetailQuery() throws Exception { - System.setProperty("s2.test", "true"); QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("user_name", "department"), QueryType.DETAIL); SemanticQueryResp semanticQueryResp = @@ -87,7 +86,6 @@ 
public class QueryByStructTest extends BaseTest { @Test public void testFilterQuery() throws Exception { - System.setProperty("s2.test", "true"); QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("department")); List dimensionFilters = new ArrayList<>(); Filter filter = new Filter(); From 7bf1ba09c58d212258c66ea6d894295b6c21c3e9 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Tue, 26 Nov 2024 23:54:46 +0800 Subject: [PATCH 20/88] [fix][launcher]Struct unit tests must specify date filter. --- .../tencent/supersonic/common/pojo/Filter.java | 2 ++ .../tencent/supersonic/demo/S2VisitsDemo.java | 4 ++-- .../tencent/supersonic/chat/MetricTest.java | 3 ++- .../supersonic/headless/QueryByMetricTest.java | 18 ++++++++++++++++++ 4 files changed, 24 insertions(+), 3 deletions(-) diff --git a/common/src/main/java/com/tencent/supersonic/common/pojo/Filter.java b/common/src/main/java/com/tencent/supersonic/common/pojo/Filter.java index 9ecb491ec..adaab80d1 100644 --- a/common/src/main/java/com/tencent/supersonic/common/pojo/Filter.java +++ b/common/src/main/java/com/tencent/supersonic/common/pojo/Filter.java @@ -2,6 +2,7 @@ package com.tencent.supersonic.common.pojo; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @@ -10,6 +11,7 @@ import java.util.List; @Data @AllArgsConstructor @NoArgsConstructor +@Builder public class Filter { private Relation relation = Relation.FILTER; diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java index f80458c31..35896939e 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java @@ -79,8 +79,8 @@ public class S2VisitsDemo extends S2BaseDemo { ModelResp userModel = addModel_1(s2Domain, demoDatabase); ModelResp pvUvModel = addModel_2(s2Domain, demoDatabase); ModelResp stayTimeModel = addModel_3(s2Domain, demoDatabase); - addModelRela(s2Domain, userModel, pvUvModel, "user_name"); - addModelRela(s2Domain, userModel, stayTimeModel, "user_name"); + addModelRela(s2Domain, pvUvModel, userModel, "user_name"); + addModelRela(s2Domain, stayTimeModel, userModel, "user_name"); // create metrics and dimensions DimensionResp departmentDimension = getDimension("department", userModel); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java index c78959c89..0dbc119f9 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java @@ -75,7 +75,7 @@ public class MetricTest extends BaseTest { @Test @SetSystemProperty(key = "s2.test", value = "true") public void testMetricGroupBy() throws Exception { - QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", DataUtils.productAgentId); + QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数和停留时长", DataUtils.productAgentId); QueryResult expectedResult = new QueryResult(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); @@ -85,6 +85,7 @@ public class MetricTest extends BaseTest { expectedParseInfo.setAggType(NONE); expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); + 
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("停留时长")); expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("部门")); expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 7, diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java index b4e3dd653..5f03983d5 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/QueryByMetricTest.java @@ -1,14 +1,18 @@ package com.tencent.supersonic.headless; +import com.tencent.supersonic.common.pojo.Filter; import com.tencent.supersonic.common.pojo.User; +import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.headless.api.pojo.request.QueryMetricReq; import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp; import com.tencent.supersonic.headless.server.service.MetricService; import org.junit.Assert; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.SetSystemProperty; import org.springframework.beans.factory.annotation.Autowired; +import java.time.LocalDate; import java.util.Arrays; import static org.junit.Assert.assertThrows; @@ -23,16 +27,24 @@ public class QueryByMetricTest extends BaseTest { QueryMetricReq queryMetricReq = new QueryMetricReq(); queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv")); queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department")); + queryMetricReq.getFilters().add(Filter.builder().name("imp_date") + .operator(FilterOperatorEnum.MINOR_THAN_EQUALS).relation(Filter.Relation.FILTER) + .value(LocalDate.now().toString()).build()); SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser()); Assert.assertNotNull(queryResp.getResultList()); Assert.assertEquals(6, queryResp.getResultList().size()); } @Test + @SetSystemProperty(key = "s2.test", value = "true") public void testWithMetricAndDimensionNames() throws Exception { QueryMetricReq queryMetricReq = new QueryMetricReq(); queryMetricReq.setMetricNames(Arrays.asList("停留时长", "访问次数")); queryMetricReq.setDimensionNames(Arrays.asList("用户", "部门")); + queryMetricReq.getFilters() + .add(Filter.builder().name("数据日期").operator(FilterOperatorEnum.MINOR_THAN_EQUALS) + .relation(Filter.Relation.FILTER).value(LocalDate.now().toString()) + .build()); SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser()); Assert.assertNotNull(queryResp.getResultList()); Assert.assertEquals(6, queryResp.getResultList().size()); @@ -44,6 +56,9 @@ public class QueryByMetricTest extends BaseTest { queryMetricReq.setDomainId(1L); queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv")); queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department")); + queryMetricReq.getFilters().add(Filter.builder().name("imp_date") + .operator(FilterOperatorEnum.MINOR_THAN_EQUALS).relation(Filter.Relation.FILTER) + .value(LocalDate.now().toString()).build()); SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser()); Assert.assertNotNull(queryResp.getResultList()); Assert.assertEquals(6, queryResp.getResultList().size()); @@ -61,6 +76,9 @@ public class QueryByMetricTest extends BaseTest { queryMetricReq.setDomainId(1L); 
queryMetricReq.setMetricIds(Arrays.asList(1L, 3L)); queryMetricReq.setDimensionIds(Arrays.asList(1L, 2L)); + queryMetricReq.getFilters().add(Filter.builder().name("imp_date") + .operator(FilterOperatorEnum.MINOR_THAN_EQUALS).relation(Filter.Relation.FILTER) + .value(LocalDate.now().toString()).build()); SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser()); Assert.assertNotNull(queryResp.getResultList()); Assert.assertEquals(6, queryResp.getResultList().size()); From dad065d0bae020246560401c4f590a812c145491 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Wed, 27 Nov 2024 11:29:29 +0800 Subject: [PATCH 21/88] [improvement][headless]Clean code logic of headless translator. --- .../translator/DefaultSemanticTranslator.java | 2 +- .../translator/calcite/s2sql/Ontology.java | 4 - .../translator/calcite/sql/SqlBuilder.java | 16 +- .../calcite/sql/node/DataModelNode.java | 309 +++++++++--------- .../calcite/sql/render/JoinRender.java | 3 +- .../server/manager/ModelYamlManager.java | 1 - .../server/manager/SemanticSchemaManager.java | 78 ----- .../supersonic/demo/S2CompanyDemo.java | 15 +- .../tencent/supersonic/demo/S2VisitsDemo.java | 1 + .../supersonic/evaluation/Text2SQLEval.java | 31 +- 10 files changed, 184 insertions(+), 276 deletions(-) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java index 662819598..ef3b73e97 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/DefaultSemanticTranslator.java @@ -50,7 +50,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator { } } catch (Exception e) { queryStatement.setErrMsg(e.getMessage()); - log.error("Failed to translate semantic query [{}]", e.getMessage(), e); + log.error("Failed to translate query [{}]", e.getMessage(), e); } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java index da2e21698..c44e7225c 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/s2sql/Ontology.java @@ -25,8 +25,4 @@ public class Ontology { .collect(Collectors.toList()); } - public Map getModelMap() { - return dataModelMap.values().stream() - .collect(Collectors.toMap(DataModel::getId, dataSource -> dataSource)); - } } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java index 570854a07..54c79e468 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/SqlBuilder.java @@ -52,7 +52,7 @@ public class SqlBuilder { // find relevant data models scope = SchemaBuilder.getScope(schema); List dataModels = - DataModelNode.getRelatedDataModels(scope, schema, ontologyQueryParam); + DataModelNode.getQueryDataModels(scope, schema, ontologyQueryParam); if (dataModels == null 
|| dataModels.isEmpty()) { throw new Exception("data model not found"); } @@ -98,20 +98,6 @@ public class SqlBuilder { return SemanticNode.getSql(parserNode, engineType); } - private String rewrite(String sql, EngineType engineType) { - try { - SqlNode sqlNode = - SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt(); - if (Objects.nonNull(sqlNode)) { - return SemanticNode.getSql( - SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType); - } - } catch (Exception e) { - log.error("optimize error {}", e.toString()); - } - return ""; - } - private void optimizeParseNode(EngineType engineType) { if (Objects.isNull(schema.getRuntimeOptions()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize()) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java index 5d4b38b49..b80458045 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/node/DataModelNode.java @@ -4,36 +4,17 @@ import com.google.common.collect.Lists; import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.pojo.enums.EngineType; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; -import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam; +import com.tencent.supersonic.headless.core.translator.calcite.s2sql.*; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder; import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.sql.SqlBasicCall; -import org.apache.calcite.sql.SqlDataTypeSpec; -import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.sql.SqlNodeList; -import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec; +import org.apache.calcite.sql.*; import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.validate.SqlValidatorScope; import org.springframework.util.CollectionUtils; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; +import java.util.*; import java.util.stream.Collectors; @Slf4j @@ -53,7 +34,7 @@ public class DataModelNode extends SemanticNode { } } if (sqlTable.isEmpty()) { - throw new Exception("DatasourceNode build error [tableSqlNode not found]"); + throw new Exception("DataModelNode build error [tableSqlNode not found]"); } SqlNode source = getTable(sqlTable, scope, EngineType.fromString(dataModel.getType())); addSchema(scope, dataModel, sqlTable); @@ -149,166 
+130,171 @@ public class DataModelNode extends SemanticNode { return dataModelList.stream().map(d -> d.getName()).collect(Collectors.joining("_")); } - public static void getQueryDimensionMeasure(S2CalciteSchema schema, - OntologyQueryParam queryParam, Set queryDimensions, Set queryMeasures) { + public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam, + Set queryDimensions, Set queryMeasures) { queryDimensions.addAll(queryParam.getDimensions().stream() .map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] : d) .collect(Collectors.toSet())); Set schemaMetricName = - schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet()); - schema.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName())) + ontology.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet()); + ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName())) .forEach(m -> m.getMetricTypeParams().getMeasures().stream() .forEach(mm -> queryMeasures.add(mm.getName()))); queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)) .forEach(m -> queryMeasures.add(m)); } - public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema, - OntologyQueryParam metricCommand, Set queryDimension, Set measures, + public static void mergeQueryFilterDimensionMeasure(Ontology ontology, + OntologyQueryParam queryParam, Set dimensions, Set measures, SqlValidatorScope scope) throws Exception { - EngineType engineType = schema.getOntology().getDatabase().getType(); - if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { + EngineType engineType = ontology.getDatabase().getType(); + if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) { Set filterConditions = new HashSet<>(); - FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType), + FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType), filterConditions); Set queryMeasures = new HashSet<>(measures); - Set schemaMetricName = - schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet()); + Set schemaMetricName = ontology.getMetrics().stream().map(m -> m.getName()) + .collect(Collectors.toSet()); for (String filterCondition : filterConditions) { if (schemaMetricName.contains(filterCondition)) { - schema.getMetrics().stream() + ontology.getMetrics().stream() .filter(m -> m.getName().equalsIgnoreCase(filterCondition)) .forEach(m -> m.getMetricTypeParams().getMeasures().stream() .forEach(mm -> queryMeasures.add(mm.getName()))); continue; } - queryDimension.add(filterCondition); + dimensions.add(filterCondition); } measures.clear(); measures.addAll(queryMeasures); } } - public static List getRelatedDataModels(SqlValidatorScope scope, + public static List getQueryDataModels(SqlValidatorScope scope, S2CalciteSchema schema, OntologyQueryParam queryParam) throws Exception { - List dataModels = new ArrayList<>(); - - // check by metric + Ontology ontology = schema.getOntology(); + // get query measures and dimensions Set queryMeasures = new HashSet<>(); Set queryDimensions = new HashSet<>(); - getQueryDimensionMeasure(schema, queryParam, queryDimensions, queryMeasures); - DataModel baseDataModel = null; - // one , match measure count - Map dataSourceMeasures = new HashMap<>(); - for (Map.Entry entry : schema.getDataModels().entrySet()) { - Set sourceMeasure = entry.getValue().getMeasures().stream() - 
.map(mm -> mm.getName()).collect(Collectors.toSet()); - sourceMeasure.retainAll(queryMeasures); - dataSourceMeasures.put(entry.getKey(), sourceMeasure.size()); - } - log.info("metrics: [{}]", dataSourceMeasures); - Optional> base = dataSourceMeasures.entrySet().stream() - .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); - if (base.isPresent()) { - baseDataModel = schema.getDataModels().get(base.get().getKey()); - dataModels.add(baseDataModel); - } - // second , check match all dimension and metric - if (baseDataModel != null) { - Set filterMeasure = new HashSet<>(); - Set sourceMeasure = baseDataModel.getMeasures().stream().map(mm -> mm.getName()) - .collect(Collectors.toSet()); - Set dimension = baseDataModel.getDimensions().stream().map(dd -> dd.getName()) - .collect(Collectors.toSet()); - baseDataModel.getIdentifiers().stream().forEach(i -> dimension.add(i.getName())); - if (schema.getDimensions().containsKey(baseDataModel.getName())) { - schema.getDimensions().get(baseDataModel.getName()).stream() - .forEach(d -> dimension.add(d.getName())); - } - filterMeasure.addAll(sourceMeasure); - filterMeasure.addAll(dimension); - EngineType engineType = schema.getOntology().getDatabase().getType(); - mergeQueryFilterDimensionMeasure(schema, queryParam, queryDimensions, queryMeasures, - scope); - boolean isAllMatch = checkMatch(sourceMeasure, queryDimensions, queryMeasures, - dimension, queryParam, scope, engineType); - if (isAllMatch) { - log.debug("baseDataModel match all "); - return dataModels; - } - // find all dataSource has the same identifiers - List linkDataModels = getLinkDataSourcesByJoinRelation(queryDimensions, - queryMeasures, baseDataModel, schema); - if (CollectionUtils.isEmpty(linkDataModels)) { - log.debug("baseDataModel get by identifiers "); - Set baseIdentifiers = baseDataModel.getIdentifiers().stream() - .map(i -> i.getName()).collect(Collectors.toSet()); - if (baseIdentifiers.isEmpty()) { - throw new Exception( - "datasource error : " + baseDataModel.getName() + " miss identifier"); - } - linkDataModels = getLinkDataSources(baseIdentifiers, queryDimensions, queryMeasures, - baseDataModel, schema); - if (linkDataModels.isEmpty()) { - throw new Exception(String.format( - "not find the match datasource : dimension[%s],measure[%s]", + getQueryDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures); + mergeQueryFilterDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures, + scope); + + // first, find the base model + DataModel baseDataModel = findBaseModel(ontology, queryMeasures, queryDimensions); + if (Objects.isNull(baseDataModel)) { + throw new RuntimeException( + String.format("could not find matching dataModel, dimensions:%s, measures:%s", queryDimensions, queryMeasures)); - } - } - log.debug("linkDataModels {}", linkDataModels); - return linkDataModels; + } + // if the base model matches all queried measures and dimensions, just return + if (checkMatch(baseDataModel, queryMeasures, queryDimensions)) { + log.debug("baseDataModel match all measures and dimensions"); + return Collections.singletonList(baseDataModel); } - return dataModels; + // second, traverse the ontology to find other related dataModels + List relatedDataModels = findRelatedModelsByRelation(ontology, baseDataModel, + queryDimensions, queryMeasures); + if (CollectionUtils.isEmpty(relatedDataModels)) { + relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel, + queryDimensions, queryMeasures); + } + if 
(CollectionUtils.isEmpty(relatedDataModels)) { + relatedDataModels = Collections.singletonList(baseDataModel); + } + + log.debug("relatedDataModels {}", relatedDataModels); + return relatedDataModels; } - private static boolean checkMatch(Set sourceMeasure, Set queryDimension, - Set measures, Set dimension, OntologyQueryParam metricCommand, - SqlValidatorScope scope, EngineType engineType) throws Exception { - boolean isAllMatch = true; - sourceMeasure.retainAll(measures); - if (sourceMeasure.size() < measures.size()) { - log.info("baseDataSource measures not match all measure"); - // check dimension again - Set dimensionMeasures = new HashSet<>(); - dimensionMeasures.addAll(dimension); - dimensionMeasures.retainAll(measures); - if (sourceMeasure.size() + dimensionMeasures.size() < measures.size()) { - log.info("baseDataSource not match all measure"); - isAllMatch = false; + private static DataModel findBaseModel(Ontology ontology, Set queryMeasures, + Set queryDimensions) { + DataModel dataModel = null; + // first, try to find the model with the most matching measures + Map dataModelMeasuresCount = new HashMap<>(); + for (Map.Entry entry : ontology.getDataModelMap().entrySet()) { + Set sourceMeasure = entry.getValue().getMeasures().stream() + .map(Measure::getName).collect(Collectors.toSet()); + sourceMeasure.retainAll(queryMeasures); + dataModelMeasuresCount.put(entry.getKey(), sourceMeasure.size()); + } + log.info("dataModelMeasureCount: [{}]", dataModelMeasuresCount); + Optional> base = + dataModelMeasuresCount.entrySet().stream().filter(e -> e.getValue() > 0) + .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); + + if (base.isPresent()) { + dataModel = ontology.getDataModelMap().get(base.get().getKey()); + } else { + // second, try to find the model with the most matching dimensions + Map dataModelDimCount = new HashMap<>(); + for (Map.Entry> entry : ontology.getDimensionMap().entrySet()) { + Set modelDimensions = entry.getValue().stream().map(Dimension::getName) + .collect(Collectors.toSet()); + modelDimensions.retainAll(queryDimensions); + dataModelDimCount.put(entry.getKey(), modelDimensions.size()); + } + log.info("dataModelDimCount: [{}]", dataModelDimCount); + base = dataModelDimCount.entrySet().stream().filter(e -> e.getValue() > 0) + .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); + if (base.isPresent()) { + dataModel = ontology.getDataModelMap().get(base.get().getKey()); } } - measures.removeAll(sourceMeasure); - dimension.retainAll(queryDimension); - if (dimension.size() < queryDimension.size()) { - log.debug("baseDataSource not match all dimension"); + return dataModel; + } + + private static boolean checkMatch(DataModel baseDataModel, Set queryMeasures, + Set queryDimension) { + boolean isAllMatch = true; + Set baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName) + .collect(Collectors.toSet()); + Set baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName) + .collect(Collectors.toSet()); + baseDataModel.getIdentifiers().stream().forEach(i -> baseDimensions.add(i.getName())); + + baseMeasures.retainAll(queryMeasures); + if (baseMeasures.size() < queryMeasures.size()) { + // check dimension again + Set dimensionMeasures = new HashSet<>(); + dimensionMeasures.addAll(baseDimensions); + dimensionMeasures.retainAll(queryMeasures); + if (baseMeasures.size() + dimensionMeasures.size() < queryMeasures.size()) { + log.info("baseDataModel not match all measures"); + isAllMatch = false; + 
} + queryMeasures.removeAll(dimensionMeasures); + } + queryMeasures.removeAll(baseMeasures); + + baseDimensions.retainAll(queryDimension); + if (baseDimensions.size() < queryDimension.size()) { + log.debug("baseDataModel not match all dimensions"); isAllMatch = false; } - queryDimension.removeAll(dimension); + queryDimension.removeAll(baseDimensions); - if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) { - Set whereFields = new HashSet<>(); - SqlNode sqlNode = parse(metricCommand.getWhere(), scope, engineType); - FilterNode.getFilterField(sqlNode, whereFields); - } return isAllMatch; } - private static List getLinkDataSourcesByJoinRelation(Set queryDimension, - Set measures, DataModel baseDataModel, S2CalciteSchema schema) { - Set linkDataSourceName = new HashSet<>(); - List linkDataModels = new ArrayList<>(); + private static List findRelatedModelsByRelation(Ontology ontology, + DataModel baseDataModel, Set queryDimensions, Set queryMeasures) { + Set joinDataModelNames = new HashSet<>(); + List joinDataModels = new ArrayList<>(); Set before = new HashSet<>(); before.add(baseDataModel.getName()); - if (!CollectionUtils.isEmpty(schema.getJoinRelations())) { + + if (!CollectionUtils.isEmpty(ontology.getJoinRelations())) { Set visitJoinRelations = new HashSet<>(); List sortedJoinRelation = new ArrayList<>(); - sortJoinRelation(schema.getJoinRelations(), baseDataModel.getName(), visitJoinRelations, - sortedJoinRelation); - schema.getJoinRelations().stream().filter(j -> !visitJoinRelations.contains(j.getId())) + sortJoinRelation(ontology.getJoinRelations(), baseDataModel.getName(), + visitJoinRelations, sortedJoinRelation); + ontology.getJoinRelations().stream() + .filter(j -> !visitJoinRelations.contains(j.getId())) .forEach(j -> sortedJoinRelation.add(j)); for (JoinRelation joinRelation : sortedJoinRelation) { if (!before.contains(joinRelation.getLeft()) @@ -317,53 +303,54 @@ public class DataModelNode extends SemanticNode { } boolean isMatch = false; boolean isRight = before.contains(joinRelation.getLeft()); - DataModel other = isRight ? schema.getDataModels().get(joinRelation.getRight()) - : schema.getDataModels().get(joinRelation.getLeft()); - if (!queryDimension.isEmpty()) { + DataModel other = isRight ? 
ontology.getDataModelMap().get(joinRelation.getRight()) + : ontology.getDataModelMap().get(joinRelation.getLeft()); + if (!queryDimensions.isEmpty()) { Set linkDimension = other.getDimensions().stream() .map(dd -> dd.getName()).collect(Collectors.toSet()); other.getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName())); - linkDimension.retainAll(queryDimension); + linkDimension.retainAll(queryDimensions); if (!linkDimension.isEmpty()) { isMatch = true; } } - Set linkMeasure = other.getMeasures().stream().map(mm -> mm.getName()) + Set linkMeasure = other.getMeasures().stream().map(Measure::getName) .collect(Collectors.toSet()); - linkMeasure.retainAll(measures); + linkMeasure.retainAll(queryMeasures); if (!linkMeasure.isEmpty()) { isMatch = true; } - if (!isMatch && schema.getDimensions().containsKey(other.getName())) { - Set linkDimension = schema.getDimensions().get(other.getName()).stream() - .map(dd -> dd.getName()).collect(Collectors.toSet()); - linkDimension.retainAll(queryDimension); + if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) { + Set linkDimension = ontology.getDimensionMap().get(other.getName()) + .stream().map(dd -> dd.getName()).collect(Collectors.toSet()); + linkDimension.retainAll(queryDimensions); if (!linkDimension.isEmpty()) { isMatch = true; } } if (isMatch) { - linkDataSourceName.add(other.getName()); + joinDataModelNames.add(other.getName()); before.add(other.getName()); } } } - if (!CollectionUtils.isEmpty(linkDataSourceName)) { + if (!CollectionUtils.isEmpty(joinDataModelNames)) { Map orders = new HashMap<>(); - linkDataSourceName.add(baseDataModel.getName()); + joinDataModelNames.add(baseDataModel.getName()); orders.put(baseDataModel.getName(), 0L); - for (JoinRelation joinRelation : schema.getJoinRelations()) { - if (linkDataSourceName.contains(joinRelation.getLeft()) - && linkDataSourceName.contains(joinRelation.getRight())) { + for (JoinRelation joinRelation : ontology.getJoinRelations()) { + if (joinDataModelNames.contains(joinRelation.getLeft()) + && joinDataModelNames.contains(joinRelation.getRight())) { orders.put(joinRelation.getLeft(), 0L); orders.put(joinRelation.getRight(), 1L); } } orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> { - linkDataModels.add(schema.getDataModels().get(d.getKey())); + joinDataModels.add(ontology.getDataModelMap().get(d.getKey())); }); } - return linkDataModels; + + return joinDataModels; } private static void sortJoinRelation(List joinRelations, String next, @@ -381,12 +368,17 @@ public class DataModelNode extends SemanticNode { } } - private static List getLinkDataSources(Set baseIdentifiers, - Set queryDimension, Set measures, DataModel baseDataModel, - S2CalciteSchema schema) { + private static List findRelatedModelsByIdentifier(Ontology ontology, + DataModel baseDataModel, Set queryDimension, Set measures) { + Set baseIdentifiers = baseDataModel.getIdentifiers().stream().map(Identify::getName) + .collect(Collectors.toSet()); + if (baseIdentifiers.isEmpty()) { + return Collections.EMPTY_LIST; + } + Set linkDataSourceName = new HashSet<>(); List linkDataModels = new ArrayList<>(); - for (Map.Entry entry : schema.getDataModels().entrySet()) { + for (Map.Entry entry : ontology.getDataModelMap().entrySet()) { if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) { continue; } @@ -417,9 +409,9 @@ public class DataModelNode extends SemanticNode { } } } - for (Map.Entry> entry : schema.getDimensions().entrySet()) { + for (Map.Entry> entry : 
ontology.getDimensionMap().entrySet()) { if (!queryDimension.isEmpty()) { - Set linkDimension = entry.getValue().stream().map(dd -> dd.getName()) + Set linkDimension = entry.getValue().stream().map(Dimension::getName) .collect(Collectors.toSet()); linkDimension.retainAll(queryDimension); if (!linkDimension.isEmpty()) { @@ -428,7 +420,7 @@ public class DataModelNode extends SemanticNode { } } for (String linkName : linkDataSourceName) { - linkDataModels.add(schema.getDataModels().get(linkName)); + linkDataModels.add(ontology.getDataModelMap().get(linkName)); } if (!CollectionUtils.isEmpty(linkDataModels)) { List all = new ArrayList<>(); @@ -438,4 +430,5 @@ public class DataModelNode extends SemanticNode { } return Lists.newArrayList(); } + } diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java index 531a773b4..50fad152a 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/calcite/sql/render/JoinRender.java @@ -60,7 +60,8 @@ public class JoinRender extends Renderer { } Set queryAllDimension = new HashSet<>(); Set measures = new HashSet<>(); - DataModelNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures); + DataModelNode.getQueryDimensionMeasure(schema.getOntology(), metricCommand, + queryAllDimension, measures); SqlNode left = null; TableView leftTable = null; TableView innerView = new TableView(); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/ModelYamlManager.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/ModelYamlManager.java index 3a96b718f..44b2fb6bd 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/ModelYamlManager.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/ModelYamlManager.java @@ -33,7 +33,6 @@ public class ModelYamlManager { ModelDetail modelDetail = modelResp.getModelDetail(); DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType()); SysTimeDimensionBuilder.addSysTimeDimension(modelDetail.getDimensions(), engineAdaptor); - addInterCntMetric(modelResp.getBizName(), modelDetail); DataModelYamlTpl dataModelYamlTpl = new DataModelYamlTpl(); dataModelYamlTpl.setType(databaseResp.getType()); BeanUtils.copyProperties(modelDetail, dataModelYamlTpl); diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java index 5a6228b9e..cb4d828af 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/manager/SemanticSchemaManager.java @@ -2,10 +2,8 @@ package com.tencent.supersonic.headless.server.manager; import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; -import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; -import 
com.tencent.supersonic.headless.api.pojo.response.TagResp; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.*; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization.TimePartType; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; @@ -64,82 +62,6 @@ public class SemanticSchemaManager { return ontology; } - public Ontology getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) throws Exception { - if (CollectionUtils.isEmpty(semanticSchemaResp.getTags())) { - throw new Exception("semanticSchemaResp tag is empty"); - } - Ontology ontology = buildOntology(semanticSchemaResp); - // Map> dimensions = new HashMap<>(); - Map> tagMap = new HashMap<>(); - for (TagResp tagResp : semanticSchemaResp.getTags()) { - if (!tagMap.containsKey(tagResp.getModelId())) { - tagMap.put(tagResp.getModelId(), new ArrayList<>()); - } - tagMap.get(tagResp.getModelId()).add(tagResp); - } - if (Objects.nonNull(ontology.getDataModelMap()) && !ontology.getDataModelMap().isEmpty()) { - for (Map.Entry entry : ontology.getDataModelMap().entrySet()) { - List modelDimensions = new ArrayList<>(); - if (!ontology.getDimensionMap().containsKey(entry.getKey())) { - ontology.getDimensionMap().put(entry.getKey(), modelDimensions); - } else { - modelDimensions = ontology.getDimensionMap().get(entry.getKey()); - } - if (tagMap.containsKey(entry.getValue().getId())) { - for (TagResp tagResp : tagMap.get(entry.getValue().getId())) { - addTagModel(tagResp, modelDimensions, ontology.getMetrics()); - } - } - } - } - - return ontology; - } - - private void addTagModel(TagResp tagResp, List modelDimensions, - List modelMetrics) throws Exception { - TagDefineType tagDefineType = TagDefineType.valueOf(tagResp.getTagDefineType()); - switch (tagDefineType) { - case FIELD: - case DIMENSION: - if (TagDefineType.DIMENSION.equals(tagResp.getTagDefineType())) { - Optional modelDimension = modelDimensions.stream() - // .filter(d -> d.getBizName().equals(tagResp.getExpr())) - .findFirst(); - if (modelDimension.isPresent()) { - modelDimension.get().setName(tagResp.getBizName()); - return; - } - } - Dimension dimension = Dimension.builder().build(); - dimension.setType(""); - // dimension.setExpr(tagResp.getExpr()); - dimension.setName(tagResp.getBizName()); - dimension.setOwners(""); - dimension.setBizName(tagResp.getBizName()); - if (Objects.isNull(dimension.getDataType())) { - dimension.setDataType(DataType.UNKNOWN); - } - - DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams(); - dimension.setDimensionTimeTypeParams(dimensionTimeTypeParams); - modelDimensions.add(dimension); - return; - case METRIC: - Optional modelMetric = modelMetrics.stream() - // .filter(m -> m.getName().equalsIgnoreCase(tagResp.getExpr())) - .findFirst(); - if (modelMetric.isPresent()) { - modelMetric.get().setName(tagResp.getBizName()); - } else { - throw new Exception( - String.format("tag [{}] cant find the metric", tagResp.getBizName())); - } - return; - default: - } - } - public static List getMetrics(final List t) { return getMetricsByMetricYamlTpl(t); } diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java index df425a2c9..6c19cec37 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2CompanyDemo.java @@ -41,8 +41,8 @@ public 
class S2CompanyDemo extends S2BaseDemo { ModelResp model_brand = addModel_2(domain, demoDatabase); ModelResp model_brand_revenue = addModel_3(domain, demoDatabase); - addModelRela(domain, model_company, model_brand, "company_id"); - addModelRela(domain, model_brand, model_brand_revenue, "brand_id"); + addModelRela(domain, model_brand, model_company, "company_id"); + addModelRela(domain, model_brand_revenue, model_brand, "brand_id"); DataSetResp dataset = addDataSet(domain); addAgent(dataset.getId()); @@ -106,8 +106,7 @@ public class S2CompanyDemo extends S2BaseDemo { modelDetail.setMeasures(measures); modelDetail.setQueryType("sql_query"); - modelDetail.setSqlQuery("SELECT company_id,company_name,headquarter_address," - + "company_established_time,founder,ceo,annual_turnover,employee_count FROM company"); + modelDetail.setSqlQuery("SELECT * FROM company"); modelReq.setModelDetail(modelDetail); ModelResp companyModel = modelService.createModel(modelReq, defaultUser); @@ -146,8 +145,7 @@ public class S2CompanyDemo extends S2BaseDemo { modelDetail.setMeasures(measures); modelDetail.setQueryType("sql_query"); - modelDetail.setSqlQuery("SELECT brand_id,brand_name,brand_established_time," - + "company_id,legal_representative,registered_capital FROM brand"); + modelDetail.setSqlQuery("SELECT * FROM brand"); modelReq.setModelDetail(modelDetail); ModelResp brandModel = modelService.createModel(modelReq, defaultUser); @@ -187,8 +185,7 @@ public class S2CompanyDemo extends S2BaseDemo { modelDetail.setMeasures(measures); modelDetail.setQueryType("sql_query"); - modelDetail.setSqlQuery("SELECT year_time,brand_id,revenue,profit," - + "revenue_growth_year_on_year,profit_growth_year_on_year FROM brand_revenue"); + modelDetail.setSqlQuery("SELECT * FROM brand_revenue"); modelReq.setModelDetail(modelDetail); return modelService.createModel(modelReq, defaultUser); } @@ -227,7 +224,7 @@ public class S2CompanyDemo extends S2BaseDemo { modelRelaReq.setDomainId(domain.getId()); modelRelaReq.setFromModelId(fromModel.getId()); modelRelaReq.setToModelId(toModel.getId()); - modelRelaReq.setJoinType("left join"); + modelRelaReq.setJoinType("inner join"); modelRelaReq.setJoinConditions(joinConditions); modelRelaService.save(modelRelaReq, defaultUser); } diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java index 35896939e..69e04f2ae 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2VisitsDemo.java @@ -199,6 +199,7 @@ public class S2VisitsDemo extends S2BaseDemo { List dimensions = new ArrayList<>(); dimensions.add(new Dim("部门", "department", DimensionType.categorical, 1)); + // dimensions.add(new Dim("用户", "user_name", DimensionType.categorical, 1)); modelDetail.setDimensions(dimensions); List fields = Lists.newArrayList(); fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build()); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/evaluation/Text2SQLEval.java b/launchers/standalone/src/test/java/com/tencent/supersonic/evaluation/Text2SQLEval.java index 21a6869c6..4dd9dfb7d 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/evaluation/Text2SQLEval.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/evaluation/Text2SQLEval.java @@ -5,7 +5,10 @@ import com.google.common.collect.Lists; import 
com.google.common.collect.Maps; import com.tencent.supersonic.chat.BaseTest; import com.tencent.supersonic.chat.api.pojo.response.QueryResult; -import com.tencent.supersonic.chat.server.agent.*; +import com.tencent.supersonic.chat.server.agent.Agent; +import com.tencent.supersonic.chat.server.agent.AgentToolType; +import com.tencent.supersonic.chat.server.agent.DatasetTool; +import com.tencent.supersonic.chat.server.agent.ToolConfig; import com.tencent.supersonic.common.config.ChatModel; import com.tencent.supersonic.common.pojo.ChatApp; import com.tencent.supersonic.common.pojo.User; @@ -133,11 +136,28 @@ public class Text2SQLEval extends BaseTest { assert result.getTextResult().contains("3"); } + @Test + public void test_detail_query() throws Exception { + long start = System.currentTimeMillis(); + QueryResult result = submitNewChat("特斯拉旗下有哪些品牌", agentId); + durations.add(System.currentTimeMillis() - start); + assert result.getQueryColumns().size() >= 1; + assert result.getTextResult().contains("Model Y"); + assert result.getTextResult().contains("Model 3"); + } + public Agent getLLMAgent() { Agent agent = new Agent(); agent.setName("Agent for Test"); ToolConfig toolConfig = new ToolConfig(); - toolConfig.getTools().add(getDatasetTool()); + DatasetTool datasetTool = new DatasetTool(); + datasetTool.setType(AgentToolType.DATASET); + datasetTool.setDataSetIds(Lists.newArrayList(DataUtils.productDatasetId)); + toolConfig.getTools().add(datasetTool); + DatasetTool datasetTool2 = new DatasetTool(); + datasetTool2.setType(AgentToolType.DATASET); + datasetTool2.setDataSetIds(Lists.newArrayList(DataUtils.companyDatasetId)); + toolConfig.getTools().add(datasetTool2); agent.setToolConfig(JSONObject.toJSONString(toolConfig)); // create chat model for this evaluation ChatModel chatModel = new ChatModel(); @@ -154,11 +174,4 @@ public class Text2SQLEval extends BaseTest { return agent; } - private static DatasetTool getDatasetTool() { - DatasetTool datasetTool = new DatasetTool(); - datasetTool.setType(AgentToolType.DATASET); - datasetTool.setDataSetIds(Lists.newArrayList(1L)); - - return datasetTool; - } } From 25559fdaa5c4f2092cd962db62d3348372b20120 Mon Sep 17 00:00:00 2001 From: lexluo09 <39718951+lexluo09@users.noreply.github.com> Date: Wed, 27 Nov 2024 15:03:00 +0800 Subject: [PATCH 22/88] [improvement][headless] add validateAndQuery interface in SqlQueryApiController --- .../facade/rest/SqlQueryApiController.java | 33 ++++++++++++++++--- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java index c82b540ad..5e2c46f4d 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java @@ -6,6 +6,7 @@ import javax.servlet.http.HttpServletResponse; import com.tencent.supersonic.auth.api.authentication.utils.UserHolder; import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.util.StringUtil; +import com.tencent.supersonic.headless.api.pojo.SqlEvaluation; import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq; import com.tencent.supersonic.headless.api.pojo.request.QuerySqlsReq; import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq; @@ -38,7 +39,7 @@ 
public class SqlQueryApiController { @PostMapping("/sql") public Object queryBySql(@RequestBody QuerySqlReq querySqlReq, HttpServletRequest request, - HttpServletResponse response) throws Exception { + HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); String sql = querySqlReq.getSql(); querySqlReq.setSql(StringUtil.replaceBackticks(sql)); @@ -48,7 +49,7 @@ public class SqlQueryApiController { @PostMapping("/sqls") public Object queryBySqls(@RequestBody QuerySqlsReq querySqlsReq, HttpServletRequest request, - HttpServletResponse response) throws Exception { + HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); List semanticQueryReqs = querySqlsReq.getSqls().stream().map(sql -> { QuerySqlReq querySqlReq = new QuerySqlReq(); @@ -72,7 +73,7 @@ public class SqlQueryApiController { @PostMapping("/sqlsWithException") public Object queryBySqlsWithException(@RequestBody QuerySqlsReq querySqlsReq, - HttpServletRequest request, HttpServletResponse response) throws Exception { + HttpServletRequest request, HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); List semanticQueryReqs = querySqlsReq.getSqls().stream().map(sql -> { QuerySqlReq querySqlReq = new QuerySqlReq(); @@ -96,10 +97,34 @@ public class SqlQueryApiController { @PostMapping("/validate") public Object validate(@RequestBody QuerySqlReq querySqlReq, HttpServletRequest request, - HttpServletResponse response) throws Exception { + HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); String sql = querySqlReq.getSql(); querySqlReq.setSql(StringUtil.replaceBackticks(sql)); return chatLayerService.validate(querySqlReq, user); } + + @PostMapping("/validateAndQuery") + public Object validateAndQuery(@RequestBody QuerySqlsReq querySqlsReq, + HttpServletRequest request, HttpServletResponse response) throws Exception { + User user = UserHolder.findUser(request, response); + List convert = convert(querySqlsReq); + for (QuerySqlReq querySqlReq : convert) { + SqlEvaluation validate = chatLayerService.validate(querySqlReq, user); + if (!validate.getIsValidated()) { + throw new Exception(validate.getValidateMsg()); + } + } + return queryBySqls(querySqlsReq, request, response); + } + + private List convert(QuerySqlsReq querySqlsReq) { + return querySqlsReq.getSqls().stream().map(sql -> { + QuerySqlReq querySqlReq = new QuerySqlReq(); + BeanUtils.copyProperties(querySqlsReq, querySqlReq); + querySqlReq.setSql(StringUtil.replaceBackticks(sql)); + return querySqlReq; + }).collect(Collectors.toList()); + } + } From 111304486b4e1fdbbeab23522b3a5d6e233b3df4 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Wed, 27 Nov 2024 22:26:30 +0800 Subject: [PATCH 23/88] [improvement][launcher]Use API to get element ID avoiding hard-code. 
--- .../converter/SqlQueryConverter.java | 4 ++-- .../facade/rest/SqlQueryApiController.java | 10 ++++----- .../com/tencent/supersonic/chat/BaseTest.java | 10 +++++++++ .../tencent/supersonic/chat/DetailTest.java | 16 ++++++++++---- .../tencent/supersonic/chat/MetricTest.java | 22 ++++++++++++++----- 5 files changed, 45 insertions(+), 17 deletions(-) diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java index f5d1a7696..d9ad61187 100644 --- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java +++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/translator/converter/SqlQueryConverter.java @@ -87,7 +87,7 @@ public class SqlQueryConverter implements QueryConverter { if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql) && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) { log.debug("getAggOption simple sql set to DEFAULT"); - return AggOption.DEFAULT; + return AggOption.NATIVE; } // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE" @@ -107,7 +107,7 @@ public class SqlQueryConverter implements QueryConverter { .count(); if (defaultAggNullCnt > 0) { log.debug("getAggOption find null defaultAgg metric set to NATIVE"); - return AggOption.OUTER; + return AggOption.DEFAULT; } return AggOption.DEFAULT; } diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java index 5e2c46f4d..f8e2af71a 100644 --- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java +++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/facade/rest/SqlQueryApiController.java @@ -39,7 +39,7 @@ public class SqlQueryApiController { @PostMapping("/sql") public Object queryBySql(@RequestBody QuerySqlReq querySqlReq, HttpServletRequest request, - HttpServletResponse response) throws Exception { + HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); String sql = querySqlReq.getSql(); querySqlReq.setSql(StringUtil.replaceBackticks(sql)); @@ -49,7 +49,7 @@ public class SqlQueryApiController { @PostMapping("/sqls") public Object queryBySqls(@RequestBody QuerySqlsReq querySqlsReq, HttpServletRequest request, - HttpServletResponse response) throws Exception { + HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); List semanticQueryReqs = querySqlsReq.getSqls().stream().map(sql -> { QuerySqlReq querySqlReq = new QuerySqlReq(); @@ -73,7 +73,7 @@ public class SqlQueryApiController { @PostMapping("/sqlsWithException") public Object queryBySqlsWithException(@RequestBody QuerySqlsReq querySqlsReq, - HttpServletRequest request, HttpServletResponse response) throws Exception { + HttpServletRequest request, HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); List semanticQueryReqs = querySqlsReq.getSqls().stream().map(sql -> { QuerySqlReq querySqlReq = new QuerySqlReq(); @@ -97,7 +97,7 @@ public class SqlQueryApiController { @PostMapping("/validate") public Object validate(@RequestBody QuerySqlReq querySqlReq, HttpServletRequest 
request, - HttpServletResponse response) throws Exception { + HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); String sql = querySqlReq.getSql(); querySqlReq.setSql(StringUtil.replaceBackticks(sql)); @@ -106,7 +106,7 @@ public class SqlQueryApiController { @PostMapping("/validateAndQuery") public Object validateAndQuery(@RequestBody QuerySqlsReq querySqlsReq, - HttpServletRequest request, HttpServletResponse response) throws Exception { + HttpServletRequest request, HttpServletResponse response) throws Exception { User user = UserHolder.findUser(request, response); List convert = convert(querySqlsReq); for (QuerySqlReq querySqlReq : convert) { diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java index cab07b66c..d92be2031 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/BaseTest.java @@ -13,12 +13,14 @@ import com.tencent.supersonic.common.service.ChatModelService; import com.tencent.supersonic.headless.api.pojo.SchemaElement; import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo; import com.tencent.supersonic.headless.api.pojo.response.QueryState; +import com.tencent.supersonic.headless.server.service.SchemaService; import com.tencent.supersonic.util.DataUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import java.time.LocalDate; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -37,6 +39,8 @@ public class BaseTest extends BaseApplication { protected AgentService agentService; @Autowired protected ChatModelService chatModelService; + @Autowired + protected SchemaService schemaService; @Value("${s2.demo.enableLLM:false}") protected boolean enableLLM; @@ -106,4 +110,10 @@ public class BaseTest extends BaseApplication { assertEquals(expectedParseInfo.getDateInfo(), actualParseInfo.getDateInfo()); } + + protected SchemaElement getSchemaElementByName(Set elementSet, String name) { + Optional matchElement = + elementSet.stream().filter(e -> e.getName().equals(name)).findFirst(); + return matchElement.orElse(null); + } } diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java index 966943e10..dcb5a7865 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/DetailTest.java @@ -5,6 +5,7 @@ import com.tencent.supersonic.chat.api.pojo.response.QueryResult; import com.tencent.supersonic.common.pojo.enums.AggregateTypeEnum; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.common.pojo.enums.QueryType; +import com.tencent.supersonic.headless.api.pojo.DataSetSchema; import com.tencent.supersonic.headless.api.pojo.SchemaElement; import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo; import com.tencent.supersonic.headless.api.pojo.request.QueryFilter; @@ -12,6 +13,7 @@ import com.tencent.supersonic.headless.chat.query.rule.detail.DetailDimensionQue import com.tencent.supersonic.util.DataUtils; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Test; +import 
org.junitpioneer.jupiter.SetSystemProperty; import org.springframework.boot.test.context.SpringBootTest; @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) @@ -19,6 +21,7 @@ import org.springframework.boot.test.context.SpringBootTest; public class DetailTest extends BaseTest { @Test + @SetSystemProperty(key = "s2.test", value = "true") public void test_detail_dimension() throws Exception { QueryResult actualResult = submitNewChat("周杰伦流派和代表作", DataUtils.singerAgentId); @@ -30,8 +33,11 @@ public class DetailTest extends BaseTest { expectedParseInfo.setQueryType(QueryType.DETAIL); expectedParseInfo.setAggType(AggregateTypeEnum.NONE); - QueryFilter dimensionFilter = - DataUtils.getFilter("singer_name", FilterOperatorEnum.EQUALS, "周杰伦", "歌手名", 17L); + DataSetSchema schema = schemaService.getDataSetSchema(DataUtils.singerDatasettId); + SchemaElement singerElement = getSchemaElementByName(schema.getDimensions(), "歌手名"); + + QueryFilter dimensionFilter = DataUtils.getFilter("singer_name", FilterOperatorEnum.EQUALS, + "周杰伦", "歌手名", singerElement.getId()); expectedParseInfo.getDimensionFilters().add(dimensionFilter); expectedParseInfo.getDimensions() @@ -53,8 +59,10 @@ public class DetailTest extends BaseTest { expectedParseInfo.setQueryType(QueryType.DETAIL); expectedParseInfo.setAggType(AggregateTypeEnum.NONE); - QueryFilter dimensionFilter = - DataUtils.getFilter("genre", FilterOperatorEnum.EQUALS, "国风", "流派", 7L); + DataSetSchema schema = schemaService.getDataSetSchema(DataUtils.singerDatasettId); + SchemaElement genreElement = getSchemaElementByName(schema.getDimensions(), "流派"); + QueryFilter dimensionFilter = DataUtils.getFilter("genre", FilterOperatorEnum.EQUALS, "国风", + "流派", genreElement.getId()); expectedParseInfo.getDimensionFilters().add(dimensionFilter); expectedParseInfo.getDimensions() .addAll(Lists.newArrayList(SchemaElement.builder().name("歌手名").build())); diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java index 0dbc119f9..e7c2683ad 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java @@ -5,6 +5,8 @@ import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.common.pojo.enums.QueryType; +import com.tencent.supersonic.headless.api.pojo.DataSetSchema; +import com.tencent.supersonic.headless.api.pojo.SchemaElement; import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo; import com.tencent.supersonic.headless.api.pojo.request.QueryFilter; import com.tencent.supersonic.headless.chat.query.rule.metric.MetricFilterQuery; @@ -61,8 +63,11 @@ public class MetricTest extends BaseTest { expectedParseInfo.setAggType(NONE); expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); - expectedParseInfo.getDimensionFilters().add( - DataUtils.getFilter("user_name", FilterOperatorEnum.EQUALS, "alice", "用户", 2L)); + + DataSetSchema schema = schemaService.getDataSetSchema(DataUtils.productDatasetId); + SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户"); + expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name", + FilterOperatorEnum.EQUALS, "alice", "用户", userElement.getId())); expectedParseInfo.setDateInfo( 
DataUtils.getDateConf(DateConf.DateMode.BETWEEN, unit, period, startDay, endDay)); @@ -111,8 +116,11 @@ public class MetricTest extends BaseTest { List list = new ArrayList<>(); list.add("alice"); list.add("lucy"); - QueryFilter dimensionFilter = - DataUtils.getFilter("user_name", FilterOperatorEnum.IN, list, "用户", 2L); + + DataSetSchema schema = schemaService.getDataSetSchema(DataUtils.productDatasetId); + SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户"); + QueryFilter dimensionFilter = DataUtils.getFilter("user_name", FilterOperatorEnum.IN, list, + "用户", userElement.getId()); expectedParseInfo.getDimensionFilters().add(dimensionFilter); expectedParseInfo.setDateInfo( @@ -182,9 +190,11 @@ public class MetricTest extends BaseTest { expectedResult.setQueryMode(MetricFilterQuery.QUERY_MODE); expectedParseInfo.setAggType(NONE); + DataSetSchema schema = schemaService.getDataSetSchema(DataUtils.productDatasetId); + SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户"); expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); - expectedParseInfo.getDimensionFilters().add( - DataUtils.getFilter("user_name", FilterOperatorEnum.EQUALS, "alice", "用户", 2L)); + expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name", + FilterOperatorEnum.EQUALS, "alice", "用户", userElement.getId())); expectedParseInfo.setDateInfo( DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 1, period, startDay, startDay)); From c80794e8fc809092c3a96699af94c2b8c764ff08 Mon Sep 17 00:00:00 2001 From: jerryjzhang Date: Thu, 28 Nov 2024 09:28:17 +0800 Subject: [PATCH 24/88] [fix][launcher]Fix mysql scripts. --- .../parser/llm/OnePassSCSqlGenStrategy.java | 4 +-- .../tencent/supersonic/demo/S2BaseDemo.java | 4 +-- .../src/main/resources/application-local.yaml | 18 ++++++++++++- .../resources/config.update/sql-update.sql | 4 +-- .../src/main/resources/db/data-mysql-demo.sql | 4 +-- .../main/resources/db/schema-mysql-demo.sql | 4 +-- .../src/main/resources/db/schema-mysql.sql | 2 ++ .../supersonic/util/LLMConfigUtils.java | 25 +++++++++++++++++-- .../src/test/resources/application-local.yaml | 20 +++++++++++++-- 9 files changed, 70 insertions(+), 15 deletions(-) diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java index a41be00ab..68f6fe39a 100644 --- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java +++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java @@ -37,8 +37,8 @@ public class OnePassSCSqlGenStrategy extends SqlGenStrategy { + "please convert it to a SQL query so that relevant data could be returned " + "by executing the SQL query against underlying database." + "\n#Rules:" + "\n1.SQL columns and values must be mentioned in the `Schema`, DO NOT hallucinate." - + "\n2.ALWAYS specify date filter using `>`,`<`,`>=`,`<=` operator." - + "\n3.DO NOT include date filter in the where clause if not explicitly expressed in the `Question`." + + "\n2.ALWAYS specify time range using `>`,`<`,`>=`,`<=` operator." + + "\n3.DO NOT include time range in the where clause if not explicitly expressed in the `Question`." + "\n4.DO NOT calculate date range using functions." + "\n5.ALWAYS use `with` statement if nested aggregation is needed." 
+ "\n6.ALWAYS enclose alias declared by `AS` command in underscores." diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2BaseDemo.java b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2BaseDemo.java index bf69e5a4f..fa94e706f 100644 --- a/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2BaseDemo.java +++ b/launchers/standalone/src/main/java/com/tencent/supersonic/demo/S2BaseDemo.java @@ -119,8 +119,8 @@ public abstract class S2BaseDemo implements CommandLineRunner { } String url = dataSourceProperties.getUrl(); DatabaseReq databaseReq = new DatabaseReq(); - databaseReq.setName("H2数据库DEMO"); - databaseReq.setDescription("样例数据库实例仅用于体验,正式使用请切换持久化数据库"); + databaseReq.setName("S2数据库DEMO"); + databaseReq.setDescription("样例数据库实例仅用于体验"); if (StringUtils.isNotBlank(url) && url.toLowerCase().contains(DataType.MYSQL.getFeature().toLowerCase())) { databaseReq.setType(DataType.MYSQL.getFeature()); diff --git a/launchers/standalone/src/main/resources/application-local.yaml b/launchers/standalone/src/main/resources/application-local.yaml index 7c2737f4a..6cc6e76cc 100644 --- a/launchers/standalone/src/main/resources/application-local.yaml +++ b/launchers/standalone/src/main/resources/application-local.yaml @@ -11,4 +11,20 @@ spring: h2: console: path: /h2-console/semantic - enabled: true \ No newline at end of file + enabled: true + +### Comment out following lines if using MySQL +#spring: +# datasource: +# driver-class-name: com.mysql.cj.jdbc.Driver +# url: jdbc:mysql://localhost:3306/s2_database?user=root +# username: root +# password: +# sql: +# init: +# schema-locations: classpath:db/schema-mysql.sql,classpath:db/schema-mysql-demo.sql +# data-locations: classpath:db/data-mysql.sql,classpath:db/data-mysql-demo.sql +# h2: +# console: +# path: /h2-console/semantic +# enabled: true \ No newline at end of file diff --git a/launchers/standalone/src/main/resources/config.update/sql-update.sql b/launchers/standalone/src/main/resources/config.update/sql-update.sql index 17c43e127..c9e6c84f9 100644 --- a/launchers/standalone/src/main/resources/config.update/sql-update.sql +++ b/launchers/standalone/src/main/resources/config.update/sql-update.sql @@ -396,5 +396,5 @@ ALTER TABLE s2_agent DROP COLUMN `enable_memory_review`; alter table s2_agent add column `enable_feedback` tinyint DEFAULT 1; --20241116 -alter table s2_agent add column `admin` varchar(1000); -alter table s2_agent add column `viewer` varchar(1000); \ No newline at end of file +alter table s2_agent add column `admin` varchar(1000) COLLATE utf8_unicode_ci DEFAULT NULL; +alter table s2_agent add column `viewer` varchar(1000) COLLATE utf8_unicode_ci DEFAULT NULL; \ No newline at end of file diff --git a/launchers/standalone/src/main/resources/db/data-mysql-demo.sql b/launchers/standalone/src/main/resources/db/data-mysql-demo.sql index d438213fe..b4d29935d 100644 --- a/launchers/standalone/src/main/resources/db/data-mysql-demo.sql +++ b/launchers/standalone/src/main/resources/db/data-mysql-demo.sql @@ -1,4 +1,4 @@ --------S2VisitsDemo +-- S2VisitsDemo insert into s2_user_department (user_name, department) values ('jack','HR'); insert into s2_user_department (user_name, department) values ('tom','sales'); insert into s2_user_department (user_name, department) values ('lucy','marketing'); @@ -1019,7 +1019,7 @@ INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES ( INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATE_SUB(CURRENT_DATE(), 
INTERVAL 15 DAY), 'lucy', '0.8124302447925607', 'p4'); INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 8 DAY), 'lucy', '0.039935860913407284', 'p2'); --------S2ArtistDemo +-- S2ArtistDemo INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); diff --git a/launchers/standalone/src/main/resources/db/schema-mysql-demo.sql b/launchers/standalone/src/main/resources/db/schema-mysql-demo.sql index 904286964..194dba2fb 100644 --- a/launchers/standalone/src/main/resources/db/schema-mysql-demo.sql +++ b/launchers/standalone/src/main/resources/db/schema-mysql-demo.sql @@ -1,4 +1,4 @@ --------S2VisitsDemo +-- S2VisitsDemo CREATE TABLE IF NOT EXISTS `s2_user_department` ( `user_name` varchar(200) NOT NULL, `department` varchar(200) NOT NULL @@ -27,7 +27,7 @@ CREATE TABLE IF NOT EXISTS `singer` ( `favor_cnt` bigint DEFAULT NULL )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; --------S2ArtistDemo +-- S2ArtistDemo CREATE TABLE IF NOT EXISTS `genre` ( `g_name` varchar(20) NOT NULL , -- genre name `rating` INT , diff --git a/launchers/standalone/src/main/resources/db/schema-mysql.sql b/launchers/standalone/src/main/resources/db/schema-mysql.sql index 2ea501551..7d13ad998 100644 --- a/launchers/standalone/src/main/resources/db/schema-mysql.sql +++ b/launchers/standalone/src/main/resources/db/schema-mysql.sql @@ -15,6 +15,8 @@ CREATE TABLE IF NOT EXISTS `s2_agent` ( `created_at` datetime DEFAULT NULL, `updated_by` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `updated_at` datetime DEFAULT NULL, + `admin` varchar(1000) COLLATE utf8_unicode_ci DEFAULT NULL, + `viewer` varchar(1000) COLLATE utf8_unicode_ci DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/util/LLMConfigUtils.java b/launchers/standalone/src/test/java/com/tencent/supersonic/util/LLMConfigUtils.java index f95056482..88d855e02 100644 --- a/launchers/standalone/src/test/java/com/tencent/supersonic/util/LLMConfigUtils.java +++ b/launchers/standalone/src/test/java/com/tencent/supersonic/util/LLMConfigUtils.java @@ -11,7 +11,12 @@ public class LLMConfigUtils { OPENAI_GLM(false), OLLAMA_LLAMA3(true), OLLAMA_QWEN2(true), - OLLAMA_QWEN25(true); + OLLAMA_QWEN25_7B(true), + OLLAMA_QWEN25_14B(true), + OLLAMA_QWEN25_CODE_7B(true), + OLLAMA_QWEN25_CODE_3B(true), + OLLAMA_GLM4(true); + public boolean isOllam; @@ -35,10 +40,26 @@ public class LLMConfigUtils { baseUrl = "http://localhost:11434"; modelName = "qwen2:7b"; break; - case OLLAMA_QWEN25: + case OLLAMA_QWEN25_7B: baseUrl = "http://localhost:11434"; modelName = "qwen2.5:7b"; break; + case OLLAMA_QWEN25_14B: + baseUrl = "http://localhost:11434"; + modelName = "qwen2.5:14b"; + break; + case OLLAMA_QWEN25_CODE_7B: + baseUrl = "http://localhost:11434"; + modelName = "qwen2.5-coder:7b"; + break; + case OLLAMA_QWEN25_CODE_3B: + baseUrl = "http://localhost:11434"; + modelName = "qwen2.5-coder:3b"; + break; + case OLLAMA_GLM4: + baseUrl = "http://localhost:11434"; + modelName = "glm4:latest"; + break; case OPENAI_GLM: baseUrl = "https://open.bigmodel.cn/api/pas/v4/"; apiKey = "REPLACE_WITH_YOUR_KEY"; diff --git a/launchers/standalone/src/test/resources/application-local.yaml b/launchers/standalone/src/test/resources/application-local.yaml index 2ceb32c1e..6cc6e76cc 100644 --- 
a/launchers/standalone/src/test/resources/application-local.yaml +++ b/launchers/standalone/src/test/resources/application-local.yaml @@ -1,7 +1,7 @@ spring: datasource: driver-class-name: org.h2.Driver - url: jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false;QUERY_TIMEOUT=100 + url: jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false;QUERY_TIMEOUT=30 username: root password: semantic sql: @@ -11,4 +11,20 @@ spring: h2: console: path: /h2-console/semantic - enabled: true \ No newline at end of file + enabled: true + +### Comment out following lines if using MySQL +#spring: +# datasource: +# driver-class-name: com.mysql.cj.jdbc.Driver +# url: jdbc:mysql://localhost:3306/s2_database?user=root +# username: root +# password: +# sql: +# init: +# schema-locations: classpath:db/schema-mysql.sql,classpath:db/schema-mysql-demo.sql +# data-locations: classpath:db/data-mysql.sql,classpath:db/data-mysql-demo.sql +# h2: +# console: +# path: /h2-console/semantic +# enabled: true \ No newline at end of file From b4669cf110e2a89ada7150751776e8f152ae2be0 Mon Sep 17 00:00:00 2001 From: tristanliu Date: Thu, 28 Nov 2024 20:29:12 +0800 Subject: [PATCH 25/88] [improvement][headless-fe] code stash --- .../packages/supersonic-fe/config/routes.ts | 98 +---- webapp/packages/supersonic-fe/src/app.tsx | 13 +- .../supersonic-fe/src/common/constants.ts | 2 + .../src/pages/SemanticModel/DomainManager.tsx | 9 +- .../Insights/components/TagInfoCreateForm.tsx | 1 - .../src/pages/SemanticModel/Metric/Detail.tsx | 24 +- .../src/pages/SemanticModel/Metric/Edit.tsx | 54 +-- .../src/pages/SemanticModel/Metric/Edit2.tsx | 57 --- .../src/pages/SemanticModel/Metric/Market.tsx | 2 - .../SemanticModel/Metric/MetricInfoSider.tsx | 9 - .../Metric/components/MetricFilter.tsx | 2 +- .../components/MetricInfoCreateForm.tsx | 16 +- .../src/pages/SemanticModel/Metric/index.tsx | 2 +- .../src/pages/SemanticModel/Metric/style.less | 326 +---------------- .../src/pages/SemanticModel/ModelManager.tsx | 9 +- .../pages/SemanticModel/OverviewContainer.tsx | 56 +-- .../pages/SemanticModel/PageBreadcrumb.tsx | 6 +- .../View/components/DataSetTable.tsx | 8 +- .../View/components/DatasetCreateForm.tsx | 289 +++++++++++++++ .../SemanticModel/View/components/Detail.tsx | 118 +++++++ .../View/components/ViewCreateFormModal.tsx | 1 - .../components/ClassMetricTable.tsx | 10 +- .../DetailContainer/DetailFormWrapper.tsx | 55 +++ .../DetailContainer/DetailSider.tsx} | 84 ++--- .../components/DetailContainer/index.tsx | 22 ++ .../components/DetailContainer/style.less | 334 ++++++++++++++++++ .../components/DetailContainer/type.ts | 5 + .../SemanticModel/components/ModelTable.tsx | 5 +- .../components/TableColumnRender.tsx | 2 +- .../pages/SemanticModel/components/style.less | 2 +- .../pages/SemanticModel/models/domainData.ts | 4 +- .../src/pages/SemanticModel/service.ts | 6 + .../src/pages/SemanticModel/utils.tsx | 18 + .../packages/supersonic-fe/src/utils/utils.ts | 6 + 34 files changed, 991 insertions(+), 664 deletions(-) delete mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit2.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DatasetCreateForm.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/Detail.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DetailContainer/DetailFormWrapper.tsx rename webapp/packages/supersonic-fe/src/pages/SemanticModel/{Metric/MetricInfoEditSider.tsx => 
components/DetailContainer/DetailSider.tsx} (60%) create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DetailContainer/index.tsx create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DetailContainer/style.less create mode 100644 webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DetailContainer/type.ts diff --git a/webapp/packages/supersonic-fe/config/routes.ts b/webapp/packages/supersonic-fe/config/routes.ts index 04a8413d8..8f91deed4 100644 --- a/webapp/packages/supersonic-fe/config/routes.ts +++ b/webapp/packages/supersonic-fe/config/routes.ts @@ -91,6 +91,11 @@ const ROUTES = [ }, ], }, + { + path: '/model/dataset/:domainId/:datasetId', + component: './SemanticModel/View/components/Detail', + envEnableList: [ENV_KEY.SEMANTIC], + }, { path: '/model/metric/:domainId/:modelId/:metricId', component: './SemanticModel/Metric/Edit', @@ -102,101 +107,8 @@ const ROUTES = [ // }, // ], }, - // { - // path: '/model/manager/', - // component: './SemanticModel/OverviewContainer', - // routes: [ - // { - // path: '/model/manager/:domainId/:modelId', - // component: './SemanticModel/ModelManager', - // routes: [ - // { - // path: '/model/manager/:domainId/:modelId/:menuKey', - // component: './SemanticModel/ModelManager', - // }, - // ], - // }, - // ], - // }, - // { - // path: '/model/:domainId', - // component: './SemanticModel/DomainManager', - // envEnableList: [ENV_KEY.SEMANTIC], - // routes: [ - // { - // path: '/model/:domainId/:menuKey', - // component: './SemanticModel/DomainManager', - // }, - // ], - // }, - // { - // path: '/model/manager/:domainId/:modelId', - // component: './SemanticModel/ModelManager', - // envEnableList: [ENV_KEY.SEMANTIC], - // routes: [ - // { - // path: '/model/manager/:domainId/:modelId/:menuKey', - // component: './SemanticModel/ModelManager', - // }, - // ], - // }, - - // { - // path: '/model/:domainId/:modelId/:menuKey', - // component: './SemanticModel/DomainManager', - // envEnableList: [ENV_KEY.SEMANTIC], - // }, - // { - // path: '/model/:domainId/:modelId/metric', - // component: './SemanticModel/components/ModelMetric', - // envEnableList: [ENV_KEY.SEMANTIC], - // routes: [ - // { - // path: '/model/:domainId/:modelId/metric/list', - // component: './SemanticModel/components/ClassMetricTable', - // envEnableList: [ENV_KEY.SEMANTIC], - // }, - // ], - // }, ], }, - // { - // path: '/model/', - // component: './SemanticModel/DomainManager', - // name: 'semanticModel', - // envEnableList: [ENV_KEY.SEMANTIC], - // routes: [ - // { - // path: '/model/:domainId/:modelId', - // component: './SemanticModel/DomainManager', - // envEnableList: [ENV_KEY.SEMANTIC], - // }, - // { - // path: '/model/:domainId/:modelId/:menuKey', - // component: './SemanticModel/DomainManager', - // envEnableList: [ENV_KEY.SEMANTIC], - // }, - // { - // path: '/model/:domainId/:modelId/metric', - // component: './SemanticModel/components/ModelMetric', - // envEnableList: [ENV_KEY.SEMANTIC], - // routes: [ - // { - // path: '/model/:domainId/:modelId/metric/list', - // component: './SemanticModel/components/ClassMetricTable', - // envEnableList: [ENV_KEY.SEMANTIC], - // }, - // ], - // }, - // ], - // }, - - // { - // path: '/model/:domainId/:modelId/:menuKey', - // component: './SemanticModel/DomainManager', - // name: 'semanticModel', - // envEnableList: [ENV_KEY.SEMANTIC], - // }, { path: '/metric', diff --git a/webapp/packages/supersonic-fe/src/app.tsx b/webapp/packages/supersonic-fe/src/app.tsx index 
107a7ea68..bcc5d7a69 100644 --- a/webapp/packages/supersonic-fe/src/app.tsx +++ b/webapp/packages/supersonic-fe/src/app.tsx @@ -12,6 +12,7 @@ import { publicPath } from '../config/defaultSettings'; import { Copilot } from 'supersonic-chat-sdk'; import { configProviderTheme } from '../config/themeSettings'; export { request } from './services/request'; +import { BASE_TITLE } from '@/common/constants'; import { ROUTE_AUTH_CODES } from '../config/routes'; import AppPage from './pages/index'; @@ -106,12 +107,12 @@ export async function getInitialState(): Promise<{ // } export function onRouteChange() { - const title = window.document.title.split('-SuperSonic')[0]; - if (!title.includes('SuperSonic')) { - window.document.title = `${title}-SuperSonic`; - } else { - window.document.title = 'SuperSonic'; - } + setTimeout(() => { + let title = window.document.title; + if (!title.toLowerCase().endsWith(BASE_TITLE.toLowerCase())) { + window.document.title = `${title}-${BASE_TITLE}`; + } + }, 100); } export const layout: RunTimeLayoutConfig = (params) => { diff --git a/webapp/packages/supersonic-fe/src/common/constants.ts b/webapp/packages/supersonic-fe/src/common/constants.ts index 0b4f2a520..c3beadcb3 100644 --- a/webapp/packages/supersonic-fe/src/common/constants.ts +++ b/webapp/packages/supersonic-fe/src/common/constants.ts @@ -3,6 +3,8 @@ export const AUTH_TOKEN_KEY = process.env.APP_TARGET === 'inner' ? 'TME_TOKEN' : // 记录上次访问页面 export const FROM_URL_KEY = 'FROM_URL'; +export const BASE_TITLE = 'Supersonic'; + export const PRIMARY_COLOR = '#f87653'; export const CHART_BLUE_COLOR = '#446dff'; export const CHAT_BLUE = '#1b4aef'; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx index d278a710b..677b3b561 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/DomainManager.tsx @@ -1,6 +1,7 @@ import React, { useState } from 'react'; -import { history, useParams, useModel } from '@umijs/max'; +import { useParams, useModel } from '@umijs/max'; import DomainManagerTab from './components/DomainManagerTab'; +import { toDomainList } from '@/pages/SemanticModel/utils'; type Props = {}; @@ -14,16 +15,12 @@ const DomainManager: React.FC = ({}) => { const [activeKey, setActiveKey] = useState(menuKey); - const pushUrlMenu = (domainId: number, menuKey: string) => { - history.push(`/model/domain/${domainId}/${menuKey}`); - }; - return ( { setActiveKey(menuKey); - pushUrlMenu(selectDomainId, menuKey); + toDomainList(selectDomainId, menuKey); }} /> ); diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/components/TagInfoCreateForm.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/components/TagInfoCreateForm.tsx index 89a02cd43..8601f28e5 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/components/TagInfoCreateForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Insights/components/TagInfoCreateForm.tsx @@ -540,7 +540,6 @@ const TagInfoCreateForm: React.FC = ({ forceRender width={800} style={{ top: 48 }} - // styles={{ padding: '32px 40px 48px' }} destroyOnClose title={`${isEdit ? 
'编辑' : '新建'}标签`} maskClosable={false} diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Detail.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Detail.tsx index f3b7a8137..1d76f26a9 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Detail.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Detail.tsx @@ -1,7 +1,8 @@ import { message, Tabs, Button, Space } from 'antd'; import React, { useState, useEffect } from 'react'; import { getMetricData, getDimensionList, getDrillDownDimension } from '../service'; -import { useParams, history } from '@umijs/max'; +import { useParams, history, Helmet } from '@umijs/max'; +import { BASE_TITLE } from '@/common/constants'; import styles from './style.less'; import { ArrowLeftOutlined } from '@ant-design/icons'; import MetricTrendSection from '@/pages/SemanticModel/Metric/components/MetricTrendSection'; @@ -80,7 +81,7 @@ const MetricDetail: React.FC = () => { { key: 'metricCaliberInput', label: '基础信息', - children: , + children: , }, { key: 'metricTrend', @@ -103,8 +104,18 @@ const MetricDetail: React.FC = () => { return ( <> +
+
+ { + setMetricRelationModalOpenState(true); + }} + /> +
= () => { className={styles.metricDetailTab} />
-
- { - setMetricRelationModalOpenState(true); - }} - /> -
; @@ -15,7 +17,7 @@ const MetricDetail: React.FC = () => { const metricId = params.metricId; const [metircData, setMetircData] = useState(); const metricModel = useModel('SemanticModel.metricData'); - const { selectMetric, setSelectMetric } = metricModel; + const { setSelectMetric } = metricModel; const [settingKey, setSettingKey] = useState(MetricSettingKey.BASIC); useEffect(() => { @@ -41,23 +43,35 @@ const MetricDetail: React.FC = () => { message.error(msg); }; + const settingList = [ + { + icon: , + key: MetricSettingKey.BASIC, + text: MetricSettingWording[MetricSettingKey.BASIC], + }, + { + icon: , + key: MetricSettingKey.SQL_CONFIG, + text: MetricSettingWording[MetricSettingKey.SQL_CONFIG], + }, + ]; + return ( <> -
-
-
- { - setSettingKey(key); - }} - metircData={metircData} - /> -
-
- -
-
-
+ + { + setSettingKey(key); + }} + /> + } + containerNode={} + /> ); }; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit2.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit2.tsx deleted file mode 100644 index f81b3bba7..000000000 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Edit2.tsx +++ /dev/null @@ -1,57 +0,0 @@ -import { message } from 'antd'; -import React, { useState, useEffect } from 'react'; -import { getMetricData } from '../service'; -import { useParams } from '@umijs/max'; -import styles from './style.less'; -import { ISemantic } from '../data'; -import MetricInfoEditSider from './MetricInfoEditSider'; -import MetricInfoCreateForm from './components/MetricInfoCreateForm'; -import { MetricSettingKey } from './constants'; - -type Props = Record; - -const MetricDetail: React.FC = () => { - const params: any = useParams(); - const metricId = params.metricId; - const [metircData, setMetircData] = useState(); - - const [settingKey, setSettingKey] = useState(MetricSettingKey.BASIC); - - useEffect(() => { - if (!metricId) { - return; - } - queryMetricData(metricId); - }, [metricId]); - - const queryMetricData = async (metricId: string) => { - const { code, data, msg } = await getMetricData(metricId); - if (code === 200) { - setMetircData({ ...data }); - return; - } - message.error(msg); - }; - - return ( - <> -
-
- {/*
- { - setSettingKey(key); - }} - metircData={metircData} - /> -
*/} -
- -
-
-
- - ); -}; - -export default MetricDetail; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Market.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Market.tsx index 606c3ec09..11700f26f 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Market.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/Market.tsx @@ -152,8 +152,6 @@ const ClassMetricTable: React.FC = ({}) => { }; const handleMetricEdit = (metricItem: ISemantic.IMetricItem) => { - // setMetricItem(metricItem); - // setCreateModalVisible(true); history.push(`/model/metric/edit/${metricItem.id}`); }; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/MetricInfoSider.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/MetricInfoSider.tsx index 9ce982bef..3f9d92d38 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/MetricInfoSider.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/MetricInfoSider.tsx @@ -59,15 +59,6 @@ const MetricInfoSider: React.FC = ({

- {/*
- - - - 基本信息 - - -
*/} -
敏感度: diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricFilter.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricFilter.tsx index 26385eda9..075c3c1d8 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricFilter.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricFilter.tsx @@ -1,4 +1,4 @@ -import { Form, Input, Space, Row, Col, Switch, Flex, Tag } from 'antd'; +import { Form, Input, Space, Row, Col } from 'antd'; import StandardFormRow from '@/components/StandardFormRow'; import TagSelect from '@/components/TagSelect'; import React, { ReactNode, useEffect } from 'react'; diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx index 217b25e05..075ccd2fe 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/components/MetricInfoCreateForm.tsx @@ -29,7 +29,6 @@ import { batchCreateTag, batchDeleteTag, } from '../../service'; -import { ArrowLeftOutlined } from '@ant-design/icons'; import MetricMetricFormTable from '../../components/MetricMetricFormTable'; import MetricFieldFormTable from '../../components/MetricFieldFormTable'; import DimensionAndMetricRelationModal from '../../components/DimensionAndMetricRelationModal'; @@ -68,7 +67,6 @@ const MetricInfoCreateForm: React.FC = ({ const isEdit = !!metricItem?.id; const domainId = metricItem?.domainId; const modelId = metricItem?.modelId; - const [currentStep, setCurrentStep] = useState(0); const formValRef = useRef({} as any); const [form] = Form.useForm(); const updateFormVal = (val: any) => { @@ -827,7 +825,10 @@ const MetricInfoCreateForm: React.FC = ({ {MetricSettingWording[settingKey]} - + {/* + */}
@@ -870,13 +871,6 @@ const MetricInfoCreateForm: React.FC = ({ {renderContent()}
-
-
- -
-
{ return ( diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/style.less b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/style.less index 7d9f57bb4..f1f9006d0 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/style.less +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/Metric/style.less @@ -183,68 +183,7 @@ } } -.metricEditWrapper { - .metricDetailTab { - :global { - .ant-tabs-nav { - margin: 10px 20px 0 20px; - padding: 0 20px; - background-color: rgb(255, 255, 255); - border-radius: 8px; - transition: box-shadow 300ms cubic-bezier(0.4, 0, 0.2, 1) 0ms; - } - .ant-tabs-tab { - padding: 12px 0; - color: #344767; - font-weight: 500; - } - } - } - .metricDetail { - position: relative; - display: flex; - flex-direction: row; - width: 100%; - padding: 0px; - background-color: transparent; - height: 100%; - .tabContainer { - padding: 24px; - min-height: calc(100vh - 78px); - width: calc(100vw - 350px); - background-color: #fafafb; - } - .metricInfoContent { - padding: 25px; - .title { - position: relative; - margin-bottom: 12px; - color: #0e73ff; - font-weight: bold; - font-size: 16px; - &::before { - position: absolute; - top: 10px; - left: -10px; - display: block; - width: 3px; - height: 14px; - font-size: 0; - background: #0e73ff; - border: 1px solid #0e73ff; - border-radius: 2px; - content: ''; - } - } - } - .siderContainer { - width: 350px; - min-height: calc(100vh - 78px); - border-radius: 6px; - padding: 24px 0 24px 24px; - } - } -} + .metricDetailWrapper { height: calc(100vh - 56px); @@ -276,7 +215,8 @@ .tabContainer { height: 100%; min-height: calc(100vh - 78px); - width: calc(100vw - 450px); + // width: calc(100vw - 450px); + width: 100%; background-color: #fafafb; } .metricInfoContent { @@ -305,9 +245,8 @@ .siderContainer { width: 450px; min-height: calc(100vh - 78px); - margin: 10px 20px 20px 0; - background-color: rgb(255, 255, 255); border-radius: 6px; + padding: 10px 0 24px 24px; box-shadow: rgba(0, 0, 0, 0.08) 6px 0px 16px 0px, rgba(0, 0, 0, 0.12) 3px 0px 6px -4px, rgba(0, 0, 0, 0.05) 9px 0px 28px 8px; } @@ -326,262 +265,5 @@ transition: box-shadow 300ms cubic-bezier(0.4, 0, 0.2, 1) 0ms; } -.metricInfoSider { - padding: 20px; - color: #344767; - background-color: #fff; - height: 100%; - border: 1px solid #e6ebf1; - border-radius: 6px; - .createTitle { - margin-bottom: 10px; - color:#344767; - font-weight: 500; - font-size: 16px; - font-family: var(--tencent-font-family); - } - .gotoMetricListIcon { - color: #3182ce; - cursor: pointer; - &:hover { - color: #5493ff; - } - } - .title { - margin-bottom: 20px; - .name { - font-weight: 600; - font-size: 18px; - } - .bizName { - margin: 5px 0 0 25px; - color: #7b809a; - font-weight: 400; - } - } - .desc { - display: block; - margin-top: 8px; - color: #7b809a; - font-weight: 500; - font-size: 14px; - line-height: 1.9; - } - .subTitle { - margin: 0px; - color: rgb(123, 128, 154); - font-weight: 700; - font-size: 14px; - line-height: 1.25; - letter-spacing: 0.03333em; - text-transform: uppercase; - text-decoration: none; - vertical-align: unset; - opacity: 1; - } - .sectionContainer { - width: 100%; - height: 100%; - position: relative; - display: flex; - flex-direction: column; - overflow: scroll; - overflow: hidden; - background-image: none; - border-radius: 6px; - .section { - padding: 16px; - color: rgb(52, 71, 103); - line-height: 1.25; - background: transparent; - box-shadow: none; - opacity: 1; - .sectionTitleBox { - padding: 8px 0; - color: rgb(52, 71, 103); - 
background: transparent; - box-shadow: none; - opacity: 1; - .sectionTitle { - margin: 0px; - color: rgb(52, 71, 103); - font-weight: 600; - font-size: 16px; - line-height: 1.625; - letter-spacing: 0.0075em; - text-transform: capitalize; - text-decoration: none; - vertical-align: unset; - opacity: 1; - } - } - - .item { - display: flex; - padding-top: 8px; - padding-right: 16px; - padding-bottom: 8px; - color: rgb(52, 71, 103); - background: transparent; - box-shadow: none; - opacity: 1; - .itemLable { - min-width: fit-content; - margin: 0px; - margin-right: 10px; - color: #344767; - font-weight: 700; - font-size: 14px; - line-height: 1.5; - letter-spacing: 0.02857em; - text-transform: capitalize; - text-decoration: none; - vertical-align: unset; - opacity: 1; - } - .itemValue { - margin: 0px; - color: #7b809a; - font-weight: 400; - font-size: 14px; - line-height: 1.5; - letter-spacing: 0.02857em; - text-transform: none; - text-decoration: none; - vertical-align: unset; - opacity: 1; - } - } - } - .hr { - flex-shrink: 0; - margin: 0px; - border-color: rgb(242, 244, 247); - // border-width: 0px 0px thin; - border-style: solid; - } - .ctrlBox { - .ctrlList { - position: relative; - margin: 0px; - padding: 8px 0px; - list-style: none; - background-color: rgb(249, 250, 251); - li { - position: relative; - display: flex; - flex-grow: 1; - align-items: center; - justify-content: flex-start; - box-sizing: border-box; - min-width: 0px; - margin: 4px; - padding: 4px 16px; - color: inherit; - text-align: left; - text-decoration: none; - vertical-align: middle; - background-color: transparent; - border: 0px; - border-radius: 0px; - outline: 0px; - cursor: pointer; - transition: background-color 150ms cubic-bezier(0.4, 0, 0.2, 1) 0ms; - appearance: none; - user-select: none; - -webkit-tap-highlight-color: transparent; - -webkit-box-flex: 1; - -webkit-box-pack: start; - -webkit-box-align: center; - &:hover { - color: #3182ce; - text-decoration: none; - background-color: rgba(16, 24, 40, 0.04); - } - } - .ctrlItemIcon { - flex-shrink: 0; - min-width: unset; - margin-right: 5px; - font-size: 14px; - } - .styles.ctrlItemLable { - display: block; - margin: 0px; - font-weight: 400; - font-size: 14px; - line-height: 1.6; - } - } - } - } -} - -.settingList { - list-style: none; - margin: 0px; - position: relative; - padding: 0px; - li { - -webkit-tap-highlight-color: transparent; - background-color: transparent; - outline: 0px; - border: 0px; - margin: 0px; - border-radius: 0px; - cursor: pointer; - user-select: none; - vertical-align: middle; - appearance: none; - display: flex; - flex-grow: 1; - justify-content: flex-start; - align-items: center; - position: relative; - text-decoration: none; - min-width: 0px; - box-sizing: border-box; - text-align: left; - padding: 8px 16px; - transition: background-color 150ms cubic-bezier(0.4, 0, 0.2, 1) 0ms; - &.active { - background-color: rgba(22, 119, 255, 0.08); - .icon { - color: rgb(22, 119, 255); - } - .content { - .text { - color: rgb(22, 119, 255); - } - } - } - .icon { - min-width: 32px; - color: #344767; - flex-shrink: 0; - display: inline-flex; - } - .content { - flex: 1 1 auto; - min-width: 0px; - margin-top: 4px; - margin-bottom: 4px; - .text { - margin: 0px; - color: #344767; - font-size: 16px; - // line-height: 1.57; - // font-family: var(--tencent-font-family); - font-weight: 600; - display: block; - } - } - &:hover { - text-decoration: none; - background-color: rgba(0, 0, 0, 0.04); - } - } -} - diff --git 
a/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx index 1cf463814..fd91435e5 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/ModelManager.tsx @@ -1,6 +1,7 @@ import React, { useEffect, useState } from 'react'; import { history, useParams, useModel } from '@umijs/max'; import ModelManagerTab from './components/ModelManagerTab'; +import { toModelList } from '@/pages/SemanticModel/utils'; type Props = {}; @@ -21,7 +22,7 @@ const ModelManager: React.FC = ({}) => { const initModelConfig = () => { const currentMenuKey = menuKey === defaultTabKey ? '' : menuKey; - pushUrlMenu(selectDomainId, selectModelId, currentMenuKey); + toModelList(selectDomainId, selectModelId, currentMenuKey); setActiveKey(currentMenuKey); }; @@ -34,17 +35,13 @@ const ModelManager: React.FC = ({}) => { MrefreshMetricList({ modelId: selectModelId }); }, [selectModelId]); - const pushUrlMenu = (domainId: number, modelId: number, menuKey: string) => { - history.push(`/model/domain/manager/${domainId}/${modelId}/${menuKey}`); - }; - return ( { setActiveKey(menuKey); - pushUrlMenu(selectDomainId, selectModelId, menuKey); + toModelList(selectDomainId, selectModelId, menuKey); }} /> ); diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx index e99c011ef..472c1f5e7 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/OverviewContainer.tsx @@ -1,9 +1,10 @@ import React, { useEffect, useState } from 'react'; -import { history, useParams, useModel, Outlet } from '@umijs/max'; +import { useParams, useModel, Outlet } from '@umijs/max'; import DomainListTree from './components/DomainList'; import styles from './components/style.less'; import { LeftOutlined, RightOutlined } from '@ant-design/icons'; import { ISemantic } from './data'; +import { toDomainList } from '@/pages/SemanticModel/utils'; type Props = {}; @@ -14,10 +15,8 @@ const OverviewContainer: React.FC = ({}) => { const modelId = params.modelId; const domainModel = useModel('SemanticModel.domainData'); const modelModel = useModel('SemanticModel.modelData'); - const databaseModel = useModel('SemanticModel.databaseData'); - const { setSelectDomain, setDomainList, selectDomainId } = domainModel; + const { setSelectDomain, selectDomainId } = domainModel; const { setSelectModel, setModelTableHistoryParams, MrefreshModelList } = modelModel; - const { MrefreshDatabaseList } = databaseModel; const menuKey = params.menuKey ? params.menuKey : !Number(modelId) ? 
defaultTabKey : ''; const [collapsedState, setCollapsedState] = useState(true); @@ -25,51 +24,11 @@ const OverviewContainer: React.FC = ({}) => { if (!selectDomainId || `${domainId}` === `${selectDomainId}`) { return; } - pushUrlMenu(selectDomainId, menuKey); + toDomainList(selectDomainId, menuKey); }, [selectDomainId]); - // const initSelectedDomain = (domainList: ISemantic.IDomainItem[]) => { - // const targetNode = domainList.filter((item: any) => { - // return `${item.id}` === domainId; - // })[0]; - // if (!targetNode) { - // const firstRootNode = domainList.filter((item: any) => { - // return item.parentId === 0; - // })[0]; - // if (firstRootNode) { - // const { id } = firstRootNode; - // setSelectDomain(firstRootNode); - // pushUrlMenu(id, menuKey); - // } - // } else { - // setSelectDomain(targetNode); - // } - // }; - - // const initProjectTree = async () => { - // const { code, data, msg } = await getDomainList(); - // if (code === 200) { - // initSelectedDomain(data); - // setDomainList(data); - // } else { - // message.error(msg); - // } - // }; - - // useEffect(() => { - // initProjectTree(); - // MrefreshDatabaseList(); - // return () => { - // setSelectDomain(undefined); - // }; - // }, []); - - const pushUrlMenu = (domainId: number, menuKey: string) => { - history.push(`/model/domain/${domainId}/${menuKey}`); - }; - - const cleanModelInfo = (domainId) => { - pushUrlMenu(domainId, defaultTabKey); + const cleanModelInfo = (domainId: number) => { + toDomainList(domainId, defaultTabKey); setSelectModel(undefined); }; @@ -102,9 +61,6 @@ const OverviewContainer: React.FC = ({}) => { [id]: {}, }); }} - // onTreeDataUpdate={() => { - // // initProjectTree(); - // }} />
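
Note on the navigation refactor in the hunks above (DomainManager.tsx, ModelManager.tsx, OverviewContainer.tsx) and in the PageBreadcrumb.tsx and DataSetTable.tsx hunks that follow: inline history.push calls are replaced by toDomainList, toModelList, and toDatasetEditPage imported from '@/pages/SemanticModel/utils'. That utils module is not included in this part of the patch, so the sketch below is only an illustration of what those helpers presumably look like, inferred from the history.push paths they replace and from the new '/model/dataset/:domainId/:datasetId' route registered in routes.ts; it is not the patch's actual implementation.

// Hypothetical sketch of @/pages/SemanticModel/utils (not part of this hunk).
// Paths are taken from the replaced history.push() calls and the new dataset route.
import { history } from '@umijs/max';

// Jump to a domain-level tab, e.g. toDomainList(1, 'overview').
export const toDomainList = (domainId: number, menuKey: string = '') => {
  history.push(`/model/domain/${domainId}/${menuKey}`);
};

// Jump to a model-level tab under a domain; menuKey may be empty,
// matching the trailing-slash path used by PageBreadcrumb.tsx.
export const toModelList = (domainId: number, modelId: number, menuKey: string = '') => {
  history.push(`/model/domain/manager/${domainId}/${modelId}/${menuKey}`);
};

// Open the dataset detail page added to routes.ts in this patch.
export const toDatasetEditPage = (domainId: number, datasetId: number) => {
  history.push(`/model/dataset/${domainId}/${datasetId}`);
};

Centralizing the route strings this way keeps the path format in one place, which is consistent with how the callers above pass either two or three arguments.
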
diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx index 8c3efdc37..828455c86 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/PageBreadcrumb.tsx @@ -1,9 +1,9 @@ -import { Outlet } from '@umijs/max'; import { Tabs, Breadcrumb, Space, Radio } from 'antd'; import React, { useRef, useEffect, useState } from 'react'; import { history, useModel } from '@umijs/max'; import { HomeOutlined, FundViewOutlined } from '@ant-design/icons'; import styles from './components/style.less'; +import { toDomainList, toModelList } from '@/pages/SemanticModel/utils'; const PageBreadcrumb: React.FC = () => { const domainModel = useModel('SemanticModel.domainData'); @@ -20,7 +20,7 @@ const PageBreadcrumb: React.FC = () => { { setSelectModel(undefined); - history.push(`/model/domain/${selectDomainId}/overview`); + toDomainList(selectDomainId, 'overview'); }} > @@ -41,7 +41,7 @@ const PageBreadcrumb: React.FC = () => { { setSelectMetric(undefined); - history.push(`/model/domain/manager/${selectDomainId}/${selectModelId}/`); + toModelList(selectDomainId, selectModelId); }} > diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx index d75534090..7f39127f4 100644 --- a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx @@ -11,6 +11,7 @@ import styles from '../../components/style.less'; import { ISemantic } from '../../data'; import { ColumnsConfig } from '../../components/TableColumnRender'; import ViewSearchFormModal from './ViewSearchFormModal'; +import { toDatasetEditPage } from '@/pages/SemanticModel/utils'; type Props = { // dataSetList: ISemantic.IDatasetItem[]; @@ -90,9 +91,10 @@ const DataSetTable: React.FC = ({ disabledEdit = false }) => { return ( { - setEditFormStep(1); - setViewItem(record); - setCreateDataSourceModalOpen(true); + toDatasetEditPage(record.domainId, record.id); + // setEditFormStep(1); + // setViewItem(record); + // setCreateDataSourceModalOpen(true); }} > {name} diff --git a/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DatasetCreateForm.tsx b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DatasetCreateForm.tsx new file mode 100644 index 000000000..5e15035f7 --- /dev/null +++ b/webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DatasetCreateForm.tsx @@ -0,0 +1,289 @@ +import React, { useState, useEffect, useRef } from 'react'; +import { Form, Button, Modal, Input, Select, Steps, Spin, Space } from 'antd'; +import styles from '../../components/style.less'; +import { message } from 'antd'; +import { formLayout } from '@/components/FormHelper/utils'; +import { createView, updateView, getDimensionList, queryMetric } from '../../service'; +import { ISemantic } from '../../data'; +import FormItemTitle from '@/components/FormHelper/FormItemTitle'; +import SelectTMEPerson from '@/components/SelectTMEPerson'; +import ViewModelConfigTransfer from './ViewModelConfigTransfer'; +import type { FormInstance } from 'antd'; + +const FormItem = Form.Item; + +export type ModelCreateFormModalProps = { + // step: number; + form: FormInstance; + activeKey: string; + domainId: number; + 
viewItem: any; + modelList: ISemantic.IModelItem[]; + onCancel: () => void; + onSubmit: (values: any) => void; +}; + +const DatasetCreateForm: React.FC = ({ + form, + activeKey, + viewItem, + domainId, + onCancel, + onSubmit, + modelList, +}) => { + const stepWidth: Record = { + '0': 800, + '1': 1200, + '2': 800, + }; + // const [currentStep, setCurrentStep] = useState(); + + const [formVals, setFormVals] = useState({ + ...viewItem, + currentModel: modelList[0]?.id, + }); + + const [queryType, setQueryType] = useState('METRIC'); + + const [saveLoading, setSaveLoading] = useState(false); + const [dimensionLoading, setDimensionLoading] = useState(false); + // const [modalWidth, setModalWidth] = useState(stepWidth[`${currentStep}`]); + const [selectedModelItem, setSelectedModelItem] = useState( + modelList[0], + ); + // const [form] = Form.useForm(); + const configTableRef = useRef(); + + useEffect(() => { + form.setFieldsValue({ + ...viewItem, + }); + // setQueryType(viewItem?.queryType); + }, [viewItem]); + + const [dimensionList, setDimensionList] = useState(); + const [metricList, setMetricList] = useState(); + // const [tagList, setTagList] = useState(); + + useEffect(() => { + console.log(selectedModelItem, 'selectedModelItemselectedModelItem'); + if (selectedModelItem?.id) { + queryDimensionList(selectedModelItem.id); + queryMetricList(selectedModelItem.id); + // queryTagList(selectedModelItem.id); + } + }, [selectedModelItem]); + + const queryDimensionList = async (modelId: number) => { + setDimensionLoading(true); + const { code, data, msg } = await getDimensionList({ modelId }); + setDimensionLoading(false); + if (code === 200 && Array.isArray(data?.list)) { + setDimensionList(data.list); + } else { + message.error(msg); + } + }; + + const queryMetricList = async (modelId: number) => { + const { code, data, msg } = await queryMetric({ modelId }); + if (code === 200 && Array.isArray(data?.list)) { + setMetricList(data.list); + } else { + message.error(msg); + } + }; + + const handleConfirm = async () => { + const fieldsValue = await form.validateFields(); + const viewModelConfigsMap = configTableRef?.current.getViewModelConfigs() || {}; + + const queryData: ISemantic.IModelItem = { + ...formVals, + ...fieldsValue, + queryType, + dataSetDetail: { + dataSetModelConfigs: Object.values(viewModelConfigsMap), + }, + domainId, + }; + setFormVals(queryData); + setSaveLoading(true); + const { code, msg } = await (!queryData.id ? createView : updateView)(queryData); + setSaveLoading(false); + if (code === 200) { + onSubmit?.(queryData); + } else { + message.error(msg); + } + }; + + // const forward = () => { + // setModalWidth(stepWidth[`${currentStep + 1}`]); + // setCurrentStep(currentStep + 1); + // }; + // const backward = () => { + // setModalWidth(stepWidth[`${currentStep - 1}`]); + // setCurrentStep(currentStep - 1); + // }; + + // const handleNext = async () => { + // await form.validateFields(); + // forward(); + // }; + + // const renderFooter = () => { + // if (currentStep === 1) { + // return ( + // <> + // + // + // + // ); + // } + // return ( + // <> + // + // + // + // + // ); + // }; + + const renderContent = () => { + return ( + <> +
+ + + 切换模型: + + + + + + {/* { + return Array.isArray(value) ? value.join(',') : ''; + }} + getValueProps={(value) => { + return { + value: isString(value) ? value.split(',') : [], + }; + }} + > + - - - - - - - - - - - - - - - - - - - - - -