[improvement]Use QueryWrapper in place of hard-coded SQLs (#1944)

* [improvement][launcher]Use API to get element ID instead of hard-coding it.

* [fix][launcher]Fix MySQL scripts.

* [improvement][launcher]Support DuckDB database and refactor translator code structure.

* [improvement][headless-fe]Revamp the semantic modeling routing interaction and support switching between dimension and dataset management.

* [improvement][headless]Add table DDL to DbSchema.

* [improvement][headless]Support getting database by type.

* [improvement][headless]Support automatic batch creation of models based on DB table names.

* [improvement][headless]Support getting domain by bizName.

* [improvement][launcher]Refactor unit tests and demo data.

* [fix][launcher]Change default vector dimension to 512.

* [improvement][dict]Add dimValueAliasMap info for KnowledgeBaseService.

* [improvement][headless]Use QueryWrapper to replace hard-coded SQL in mapper XML.

* [improvement][chat]Introduce ChatMemory to decouple business logic from ChatMemoryDO.

* [fix][common]Fix embedding store system configs.

* [fix][common]Fix PostgreSQL schema, using varchar instead of char.

* [improvement][launcher]Change SuperSonic Docker deployment from MySQL to PostgreSQL.

* [fix][launcher]Fix a number of issues related to semantic modeling.

* [fix][headless]Fix the evaluation logic of agg type.

* [fix][assembly]Fix Dockerfile and add Docker Compose run script.

* [fix][chat]Fix "multiple assignments to same column similar_queries" error.

* [improvement][headless]Use LambdaQueryWrapper to avoid hard-coded column names.

* [improvement][headless]Refactor headless infra to support advanced semantic modeling.

* [improvement][headless]Change class name `Dim` to `Dimension`.

* [improvement][chat]Introduce `TimeFieldMapper` to always map the time field.

* [fix][headless]Remove unnecessary dimension existence check.

* [fix][chat]Fix adjusted filters not taking effect.

---------
Jun Zhang authored on 2024-12-08 13:32:29 +08:00, committed by GitHub
parent 0fc29304a8, commit e55f43c737
120 changed files with 844 additions and 5810 deletions

View File

@@ -51,7 +51,7 @@ public class UserRepositoryImpl implements UserRepository {
     @Override
     public List<UserTokenDO> getUserTokenListByName(String userName) {
         QueryWrapper<UserTokenDO> queryWrapper = new QueryWrapper<>();
-        queryWrapper.eq("user_name", userName);
+        queryWrapper.lambda().eq(UserTokenDO::getUserName, userName);
         return userTokenDOMapper.selectList(queryWrapper);
     }
@@ -68,7 +68,7 @@ public class UserRepositoryImpl implements UserRepository {
     @Override
     public void deleteUserTokenByName(String userName) {
         QueryWrapper<UserTokenDO> queryWrapper = new QueryWrapper<>();
-        queryWrapper.eq("user_name", userName);
+        queryWrapper.lambda().eq(UserTokenDO::getUserName, userName);
         userTokenDOMapper.delete(queryWrapper);
     }
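This hunk shows the pattern the whole commit applies: replace string column names with entity method references so that schema refactors break at compile time instead of at runtime. A minimal sketch of the two styles, assuming MyBatis-Plus 3.x and the UserTokenDO mapper used above:

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;

// Hard-coded column: a typo or a renamed column only fails when the SQL runs.
QueryWrapper<UserTokenDO> byColumn = new QueryWrapper<>();
byColumn.eq("user_name", userName);

// Lambda style: the column is resolved from the getter, so renaming
// UserTokenDO.userName keeps this query in sync automatically.
LambdaQueryWrapper<UserTokenDO> byLambda = Wrappers.<UserTokenDO>lambdaQuery()
        .eq(UserTokenDO::getUserName, userName);
List<UserTokenDO> tokens = userTokenDOMapper.selectList(byLambda);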

View File

@@ -4,9 +4,11 @@ import javax.validation.constraints.NotNull;
 import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
 import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
+import lombok.Builder;
 import lombok.Data;

 @Data
+@Builder
 public class ChatMemoryUpdateReq {

     @NotNull(message = "id不可为空")

View File

@@ -2,8 +2,8 @@ package com.tencent.supersonic.chat.server.executor;
 import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
 import com.tencent.supersonic.chat.api.pojo.response.QueryResult;
-import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
 import com.tencent.supersonic.chat.server.pojo.ChatContext;
+import com.tencent.supersonic.chat.server.pojo.ChatMemory;
 import com.tencent.supersonic.chat.server.pojo.ExecuteContext;
 import com.tencent.supersonic.chat.server.service.ChatContextService;
 import com.tencent.supersonic.chat.server.service.MemoryService;
@@ -44,7 +44,7 @@ public class SqlExecutor implements ChatQueryExecutor {
                 Text2SQLExemplar.class);
         MemoryService memoryService = ContextUtils.getBean(MemoryService.class);
-        memoryService.createMemory(ChatMemoryDO.builder()
+        memoryService.createMemory(ChatMemory.builder()
                 .agentId(executeContext.getAgent().getId()).status(MemoryStatus.PENDING)
                 .question(exemplar.getQuestion()).sideInfo(exemplar.getSideInfo())
                 .dbSchema(exemplar.getDbSchema()).s2sql(exemplar.getSql())

View File

@@ -1,9 +1,10 @@
 package com.tencent.supersonic.chat.server.memory;

 import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
+import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
 import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter;
 import com.tencent.supersonic.chat.server.agent.Agent;
-import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
+import com.tencent.supersonic.chat.server.pojo.ChatMemory;
 import com.tencent.supersonic.chat.server.service.AgentService;
 import com.tencent.supersonic.chat.server.service.MemoryService;
 import com.tencent.supersonic.common.pojo.ChatApp;
@@ -66,7 +67,7 @@ public class MemoryReviewTask {
                 }
                 ChatMemoryFilter chatMemoryFilter =
                         ChatMemoryFilter.builder().agentId(agent.getId()).build();
-                memoryService.getMemories(chatMemoryFilter).stream().forEach(memory -> {
+                memoryService.getMemories(chatMemoryFilter).forEach(memory -> {
                     try {
                         processMemory(memory, agent);
                     } catch (Exception e) {
@@ -77,23 +78,19 @@
            }
        }

-    private void processMemory(ChatMemoryDO m, Agent agent) {
+    private void processMemory(ChatMemory m, Agent agent) {
         if (Objects.isNull(agent)) {
             log.warn("Agent id {} not found or memory review disabled", m.getAgentId());
             return;
         }
-        ChatApp chatApp = agent.getChatAppConfig().get(APP_KEY);
-        if (Objects.isNull(chatApp) || !chatApp.isEnable()) {
+        // if either LLM or human has reviewed, just return
+        if (Objects.nonNull(m.getLlmReviewRet()) || Objects.nonNull(m.getHumanReviewRet())) {
             return;
         }
-        // 如果大模型已经评估过,则不再评估
-        if (Objects.nonNull(m.getLlmReviewRet())) {
-            // directly enable memory if the LLM determines it positive
-            if (MemoryReviewResult.POSITIVE.equals(m.getLlmReviewRet())) {
-                memoryService.enableMemory(m);
-            }
+        ChatApp chatApp = agent.getChatAppConfig().get(APP_KEY);
+        if (Objects.isNull(chatApp) || !chatApp.isEnable()) {
             return;
         }
@@ -112,19 +109,19 @@
         }
     }

-    private String createPromptString(ChatMemoryDO m, String promptTemplate) {
+    private String createPromptString(ChatMemory m, String promptTemplate) {
         return String.format(promptTemplate, m.getQuestion(), m.getDbSchema(), m.getSideInfo(),
                 m.getS2sql());
     }

-    private void processResponse(String response, ChatMemoryDO m) {
+    private void processResponse(String response, ChatMemory m) {
         Matcher matcher = OUTPUT_PATTERN.matcher(response);
         if (matcher.find()) {
             m.setLlmReviewRet(MemoryReviewResult.getMemoryReviewResult(matcher.group(1)));
             m.setLlmReviewCmt(matcher.group(2));
             // directly enable memory if the LLM determines it positive
             if (MemoryReviewResult.POSITIVE.equals(m.getLlmReviewRet())) {
-                memoryService.enableMemory(m);
+                m.setStatus(MemoryStatus.ENABLED);
             }
             memoryService.updateMemory(m);
         }
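The review loop above only enables a memory once the verdict parsed out of the LLM response is POSITIVE. The parsing step, reduced to a self-contained sketch (the pattern and response format here are invented for illustration and are not the project's actual prompt contract):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ReviewParseSketch {
    // Hypothetical format: "opinion=POSITIVE, comment=looks correct"
    private static final Pattern OUTPUT_PATTERN =
            Pattern.compile("opinion=(\\w+),\\s*comment=(.*)", Pattern.DOTALL);

    public static void main(String[] args) {
        String response = "opinion=POSITIVE, comment=SQL matches the question";
        Matcher matcher = OUTPUT_PATTERN.matcher(response);
        if (matcher.find()) {
            String verdict = matcher.group(1);   // e.g. POSITIVE / NEGATIVE
            String comment = matcher.group(2);   // free-form reviewer comment
            System.out.println(verdict + " -> " + comment);
        }
    }
}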

View File

@@ -91,6 +91,7 @@ public class NL2SQLParser implements ChatQueryParser {
         // mapModes
         Set<Long> requestedDatasets = queryNLReq.getDataSetIds();
         List<SemanticParseInfo> candidateParses = Lists.newArrayList();
+        StringBuilder errMsg = new StringBuilder();
         for (Long datasetId : requestedDatasets) {
             queryNLReq.setDataSetIds(Collections.singleton(datasetId));
             ChatParseResp parseResp = new ChatParseResp(parseContext.getRequest().getQueryId());
@@ -104,6 +105,7 @@
                 doParse(queryNLReq, parseResp);
             }
             if (parseResp.getSelectedParses().isEmpty()) {
+                errMsg.append(parseResp.getErrorMsg());
                 continue;
             }
             // for one dataset select the top 1 parse after sorting
@@ -116,6 +118,10 @@
         SemanticParseInfo.sort(candidateParses);
         parseContext.getResponse().setSelectedParses(
                 candidateParses.subList(0, Math.min(parserShowCount, candidateParses.size())));
+        if (parseContext.getResponse().getSelectedParses().isEmpty()) {
+            parseContext.getResponse().setState(ParseResp.ParseState.FAILED);
+            parseContext.getResponse().setErrorMsg(errMsg.toString());
+        }
     }

     // next go with llm-based parsers unless LLM is disabled or use feedback is needed.
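The added errMsg buffer means a query fails only when every requested dataset fails to parse, and the accumulated reasons are surfaced at the end. The same control flow in a stripped-down, self-contained form (dataset names and the tryParse helper are placeholders):

import java.util.ArrayList;
import java.util.List;

public class AccumulateErrorsSketch {
    public static void main(String[] args) {
        List<String> candidates = new ArrayList<>();
        StringBuilder errMsg = new StringBuilder();
        for (String dataset : List.of("sales", "traffic")) {
            String parsed = tryParse(dataset);          // null means this dataset failed
            if (parsed == null) {
                errMsg.append("[").append(dataset).append(" failed]");
                continue;                                // keep trying the other datasets
            }
            candidates.add(parsed);
        }
        if (candidates.isEmpty()) {
            // only now surface the combined failure reasons
            throw new IllegalStateException("parse failed: " + errMsg);
        }
        System.out.println("selected: " + candidates);
    }

    private static String tryParse(String dataset) {
        return "traffic".equals(dataset) ? "SELECT ..." : null;
    }
}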

View File

@@ -4,17 +4,17 @@ import com.baomidou.mybatisplus.annotation.IdType;
 import com.baomidou.mybatisplus.annotation.TableField;
 import com.baomidou.mybatisplus.annotation.TableId;
 import com.baomidou.mybatisplus.annotation.TableName;
-import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
-import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
+import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;
-import lombok.ToString;
+import lombok.NoArgsConstructor;
 import java.util.Date;

 @Data
 @Builder
-@ToString
+@NoArgsConstructor
+@AllArgsConstructor
 @TableName("s2_chat_memory")
 public class ChatMemoryDO {

     @TableId(type = IdType.AUTO)
@@ -36,16 +36,16 @@ public class ChatMemoryDO {
     private String s2sql;

     @TableField("status")
-    private MemoryStatus status;
+    private String status;

     @TableField("llm_review")
-    private MemoryReviewResult llmReviewRet;
+    private String llmReviewRet;

     @TableField("llm_comment")
     private String llmReviewCmt;

     @TableField("human_review")
-    private MemoryReviewResult humanReviewRet;
+    private String humanReviewRet;

     @TableField("human_comment")
     private String humanReviewCmt;
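Swapping @ToString for @NoArgsConstructor and @AllArgsConstructor is not cosmetic: @Builder alone suppresses the default constructor, and MyBatis-Plus typically instantiates result objects through a no-arg constructor. A minimal illustration of the combination, with fields trimmed down (ChatMemoryRow is a made-up name):

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@Builder
@NoArgsConstructor   // lets the ORM create instances reflectively
@AllArgsConstructor  // keeps @Builder compiling once @NoArgsConstructor is present
public class ChatMemoryRow {
    private Long id;
    private String status;        // enum values stored as plain strings
    private String llmReviewRet;
}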

View File

@@ -20,7 +20,6 @@ import com.tencent.supersonic.chat.server.persistence.repository.ChatQueryReposi
 import com.tencent.supersonic.common.util.JsonUtil;
 import com.tencent.supersonic.common.util.PageUtils;
 import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
-import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
 import com.tencent.supersonic.headless.api.pojo.response.ParseTimeCostResp;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;

View File

@@ -0,0 +1,48 @@
package com.tencent.supersonic.chat.server.pojo;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.util.Date;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@ToString
public class ChatMemory {
private Long id;
private Integer agentId;
private String question;
private String sideInfo;
private String dbSchema;
private String s2sql;
private MemoryStatus status;
private MemoryReviewResult llmReviewRet;
private String llmReviewCmt;
private MemoryReviewResult humanReviewRet;
private String humanReviewCmt;
private String createdBy;
private Date createdAt;
private String updatedBy;
private Date updatedAt;
}
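With ChatMemory exposing a Lombok builder, call sites such as SqlExecutor and MemoryController assemble the domain object fluently and hand it to the service, which converts it to ChatMemoryDO before persisting. A usage sketch with placeholder values (memoryService is assumed to be an injected MemoryService):

ChatMemory memory = ChatMemory.builder()
        .agentId(1)
        .question("What was revenue last week?")
        .dbSchema("table s2_pv_uv_statis (imp_date, pv, uv)")
        .s2sql("SELECT SUM(pv) FROM s2_pv_uv_statis WHERE ...")
        .status(MemoryStatus.PENDING)
        .build();
// the service maps this POJO to ChatMemoryDO at the persistence boundary
memoryService.createMemory(memory);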

View File

@@ -56,8 +56,7 @@ public class QueryRecommendProcessor implements ParseResultProcessor {
     private void updateChatQuery(ChatQueryDO chatQueryDO) {
         ChatQueryRepository chatQueryRepository = ContextUtils.getBean(ChatQueryRepository.class);
         UpdateWrapper<ChatQueryDO> updateWrapper = new UpdateWrapper<>();
-        updateWrapper.eq("question_id", chatQueryDO.getQuestionId());
-        updateWrapper.set("similar_queries", chatQueryDO.getSimilarQueries());
+        updateWrapper.lambda().eq(ChatQueryDO::getQuestionId, chatQueryDO.getQuestionId());
         chatQueryRepository.updateChatQuery(chatQueryDO, updateWrapper);
     }
 }
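Dropping the explicit set("similar_queries", ...) call is what fixes the "multiple assignments to same column similar_queries" error from the commit message: the column value now comes only from the entity passed to the update, and the wrapper carries just the WHERE condition. The equivalent lambda-style update, assuming MyBatis-Plus 3.x and a chatQueryMapper for ChatQueryDO:

import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;

// Match rows by questionId; column values come from the entity passed to update(),
// so similar_queries is no longer assigned a second time by the wrapper.
LambdaUpdateWrapper<ChatQueryDO> wrapper = Wrappers.<ChatQueryDO>lambdaUpdate()
        .eq(ChatQueryDO::getQuestionId, chatQueryDO.getQuestionId());
chatQueryMapper.update(chatQueryDO, wrapper);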

View File

@@ -9,7 +9,7 @@ import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
 import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryCreateReq;
 import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryUpdateReq;
 import com.tencent.supersonic.chat.api.pojo.request.PageMemoryReq;
-import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
+import com.tencent.supersonic.chat.server.pojo.ChatMemory;
 import com.tencent.supersonic.chat.server.service.MemoryService;
 import com.tencent.supersonic.common.pojo.User;
 import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq;
@@ -32,7 +32,7 @@ public class MemoryController {
     public Boolean createMemory(@RequestBody ChatMemoryCreateReq chatMemoryCreateReq,
             HttpServletRequest request, HttpServletResponse response) {
         User user = UserHolder.findUser(request, response);
-        memoryService.createMemory(ChatMemoryDO.builder().agentId(chatMemoryCreateReq.getAgentId())
+        memoryService.createMemory(ChatMemory.builder().agentId(chatMemoryCreateReq.getAgentId())
                 .s2sql(chatMemoryCreateReq.getS2sql()).question(chatMemoryCreateReq.getQuestion())
                 .dbSchema(chatMemoryCreateReq.getDbSchema()).status(chatMemoryCreateReq.getStatus())
                 .humanReviewRet(MemoryReviewResult.POSITIVE).createdBy(user.getName())
@@ -49,7 +49,7 @@
     }

     @RequestMapping("/pageMemories")
-    public PageInfo<ChatMemoryDO> pageMemories(@RequestBody PageMemoryReq pageMemoryReq) {
+    public PageInfo<ChatMemory> pageMemories(@RequestBody PageMemoryReq pageMemoryReq) {
         return memoryService.pageMemories(pageMemoryReq);
     }

View File

@@ -4,27 +4,22 @@ import com.github.pagehelper.PageInfo;
 import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter;
 import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryUpdateReq;
 import com.tencent.supersonic.chat.api.pojo.request.PageMemoryReq;
-import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
+import com.tencent.supersonic.chat.server.pojo.ChatMemory;
 import com.tencent.supersonic.common.pojo.User;
 import java.util.List;

 public interface MemoryService {

-    void createMemory(ChatMemoryDO memory);
+    void createMemory(ChatMemory memory);

     void updateMemory(ChatMemoryUpdateReq chatMemoryUpdateReq, User user);

-    void updateMemory(ChatMemoryDO memory);
-
-    void enableMemory(ChatMemoryDO memory);
-
-    void disableMemory(ChatMemoryDO memory);
+    void updateMemory(ChatMemory memory);

     void batchDelete(List<Long> ids);

-    PageInfo<ChatMemoryDO> pageMemories(PageMemoryReq pageMemoryReq);
+    PageInfo<ChatMemory> pageMemories(PageMemoryReq pageMemoryReq);

-    List<ChatMemoryDO> getMemories(ChatMemoryFilter chatMemoryFilter);
-
-    List<ChatMemoryDO> getMemoriesForLlmReview();
+    List<ChatMemory> getMemories(ChatMemoryFilter chatMemoryFilter);
 }

View File

@@ -6,8 +6,8 @@ import com.tencent.supersonic.chat.api.pojo.request.ChatParseReq;
 import com.tencent.supersonic.chat.server.agent.Agent;
 import com.tencent.supersonic.chat.server.agent.VisualConfig;
 import com.tencent.supersonic.chat.server.persistence.dataobject.AgentDO;
-import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
 import com.tencent.supersonic.chat.server.persistence.mapper.AgentDOMapper;
+import com.tencent.supersonic.chat.server.pojo.ChatMemory;
 import com.tencent.supersonic.chat.server.service.AgentService;
 import com.tencent.supersonic.chat.server.service.ChatQueryService;
 import com.tencent.supersonic.chat.server.service.MemoryService;
@@ -121,7 +121,7 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
         ChatMemoryFilter chatMemoryFilter =
                 ChatMemoryFilter.builder().agentId(agent.getId()).questions(examples).build();
         List<String> memoriesExisted = memoryService.getMemories(chatMemoryFilter).stream()
-                .map(ChatMemoryDO::getQuestion).collect(Collectors.toList());
+                .map(ChatMemory::getQuestion).collect(Collectors.toList());
         for (String example : examples) {
             if (memoriesExisted.contains(example)) {
                 continue;

View File

@@ -18,11 +18,7 @@ import com.tencent.supersonic.chat.server.service.ChatManageService;
 import com.tencent.supersonic.chat.server.service.ChatQueryService;
 import com.tencent.supersonic.chat.server.util.ComponentFactory;
 import com.tencent.supersonic.chat.server.util.QueryReqConverter;
-import com.tencent.supersonic.common.jsqlparser.FieldExpression;
-import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
-import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper;
-import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
-import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
+import com.tencent.supersonic.common.jsqlparser.*;
 import com.tencent.supersonic.common.pojo.User;
 import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
 import com.tencent.supersonic.common.util.DateUtils;
@@ -48,11 +44,7 @@ import lombok.extern.slf4j.Slf4j;
 import net.sf.jsqlparser.expression.Expression;
 import net.sf.jsqlparser.expression.LongValue;
 import net.sf.jsqlparser.expression.StringValue;
-import net.sf.jsqlparser.expression.operators.relational.ComparisonOperator;
-import net.sf.jsqlparser.expression.operators.relational.GreaterThanEquals;
-import net.sf.jsqlparser.expression.operators.relational.InExpression;
-import net.sf.jsqlparser.expression.operators.relational.MinorThanEquals;
-import net.sf.jsqlparser.expression.operators.relational.ParenthesedExpressionList;
+import net.sf.jsqlparser.expression.operators.relational.*;
 import net.sf.jsqlparser.schema.Column;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
@@ -60,14 +52,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 import org.springframework.util.CollectionUtils;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
+import java.util.*;
 import java.util.stream.Collectors;

 @Slf4j
@@ -210,21 +195,23 @@ public class ChatQueryServiceImpl implements ChatQueryService {
private void handleLLMQueryMode(ChatQueryDataReq chatQueryDataReq, SemanticQuery semanticQuery, private void handleLLMQueryMode(ChatQueryDataReq chatQueryDataReq, SemanticQuery semanticQuery,
DataSetSchema dataSetSchema, User user) throws Exception { DataSetSchema dataSetSchema, User user) throws Exception {
SemanticParseInfo parseInfo = semanticQuery.getParseInfo(); SemanticParseInfo parseInfo = semanticQuery.getParseInfo();
List<String> fields = getFieldsFromSql(parseInfo); String rebuiltS2SQL;
if (checkMetricReplace(fields, chatQueryDataReq.getMetrics())) { if (checkMetricReplace(chatQueryDataReq, parseInfo)) {
log.info("llm begin replace metrics!"); log.info("rebuild S2SQL with adjusted metrics!");
SchemaElement metricToReplace = chatQueryDataReq.getMetrics().iterator().next(); SchemaElement metricToReplace = chatQueryDataReq.getMetrics().iterator().next();
replaceMetrics(parseInfo, metricToReplace); rebuiltS2SQL = replaceMetrics(parseInfo, metricToReplace);
} else { } else {
log.info("llm begin revise filters!"); log.info("rebuild S2SQL with adjusted filters!");
String correctorSql = reviseCorrectS2SQL(chatQueryDataReq, parseInfo, dataSetSchema); rebuiltS2SQL = replaceFilters(chatQueryDataReq, parseInfo, dataSetSchema);
parseInfo.getSqlInfo().setCorrectedS2SQL(correctorSql); }
semanticQuery.setParseInfo(parseInfo); // reset SqlInfo and request re-translation
parseInfo.getSqlInfo().setCorrectedS2SQL(rebuiltS2SQL);
parseInfo.getSqlInfo().setParsedS2SQL(rebuiltS2SQL);
parseInfo.getSqlInfo().setQuerySQL(null);
SemanticQueryReq semanticQueryReq = semanticQuery.buildSemanticQueryReq(); SemanticQueryReq semanticQueryReq = semanticQuery.buildSemanticQueryReq();
SemanticTranslateResp explain = semanticLayerService.translate(semanticQueryReq, user); SemanticTranslateResp explain = semanticLayerService.translate(semanticQueryReq, user);
parseInfo.getSqlInfo().setQuerySQL(explain.getQuerySQL()); parseInfo.getSqlInfo().setQuerySQL(explain.getQuerySQL());
} }
}
private void handleRuleQueryMode(SemanticQuery semanticQuery, DataSetSchema dataSetSchema, private void handleRuleQueryMode(SemanticQuery semanticQuery, DataSetSchema dataSetSchema,
User user) { User user) {
@@ -243,7 +230,9 @@ public class ChatQueryServiceImpl implements ChatQueryService {
return queryResult; return queryResult;
} }
private boolean checkMetricReplace(List<String> oriFields, Set<SchemaElement> metrics) { private boolean checkMetricReplace(ChatQueryDataReq chatQueryDataReq, SemanticParseInfo parseInfo) {
List<String> oriFields = getFieldsFromSql(parseInfo);
Set<SchemaElement> metrics = chatQueryDataReq.getMetrics();
if (CollectionUtils.isEmpty(oriFields) || CollectionUtils.isEmpty(metrics)) { if (CollectionUtils.isEmpty(oriFields) || CollectionUtils.isEmpty(metrics)) {
return false; return false;
} }
@@ -252,7 +241,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
return !oriFields.containsAll(metricNames); return !oriFields.containsAll(metricNames);
} }
private String reviseCorrectS2SQL(ChatQueryDataReq queryData, SemanticParseInfo parseInfo, private String replaceFilters(ChatQueryDataReq queryData, SemanticParseInfo parseInfo,
DataSetSchema dataSetSchema) { DataSetSchema dataSetSchema) {
String correctorSql = parseInfo.getSqlInfo().getCorrectedS2SQL(); String correctorSql = parseInfo.getSqlInfo().getCorrectedS2SQL();
log.info("correctorSql before replacing:{}", correctorSql); log.info("correctorSql before replacing:{}", correctorSql);
@@ -290,7 +279,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
return correctorSql; return correctorSql;
} }
private void replaceMetrics(SemanticParseInfo parseInfo, SchemaElement metric) { private String replaceMetrics(SemanticParseInfo parseInfo, SchemaElement metric) {
List<String> oriMetrics = parseInfo.getMetrics().stream().map(SchemaElement::getName) List<String> oriMetrics = parseInfo.getMetrics().stream().map(SchemaElement::getName)
.collect(Collectors.toList()); .collect(Collectors.toList());
String correctorSql = parseInfo.getSqlInfo().getCorrectedS2SQL(); String correctorSql = parseInfo.getSqlInfo().getCorrectedS2SQL();
@@ -302,7 +291,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
correctorSql = SqlReplaceHelper.replaceAggFields(correctorSql, fieldMap); correctorSql = SqlReplaceHelper.replaceAggFields(correctorSql, fieldMap);
} }
log.info("after replaceMetrics:{}", correctorSql); log.info("after replaceMetrics:{}", correctorSql);
parseInfo.getSqlInfo().setCorrectedS2SQL(correctorSql); return correctorSql;
} }
private QueryResult doExecution(SemanticQueryReq semanticQueryReq, String queryMode, User user) private QueryResult doExecution(SemanticQueryReq semanticQueryReq, String queryMode, User user)
@@ -477,6 +466,9 @@ public class ChatQueryServiceImpl implements ChatQueryService {
} }
private void mergeParseInfo(SemanticParseInfo parseInfo, ChatQueryDataReq queryData) { private void mergeParseInfo(SemanticParseInfo parseInfo, ChatQueryDataReq queryData) {
if (Objects.nonNull(queryData.getDateInfo())) {
parseInfo.setDateInfo(queryData.getDateInfo());
}
if (LLMSqlQuery.QUERY_MODE.equals(parseInfo.getQueryMode())) { if (LLMSqlQuery.QUERY_MODE.equals(parseInfo.getQueryMode())) {
return; return;
} }
@@ -492,9 +484,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
if (!CollectionUtils.isEmpty(queryData.getMetricFilters())) { if (!CollectionUtils.isEmpty(queryData.getMetricFilters())) {
parseInfo.setMetricFilters(queryData.getMetricFilters()); parseInfo.setMetricFilters(queryData.getMetricFilters());
} }
if (Objects.nonNull(queryData.getDateInfo())) {
parseInfo.setDateInfo(queryData.getDateInfo());
}
parseInfo.setSqlInfo(new SqlInfo()); parseInfo.setSqlInfo(new SqlInfo());
} }

View File

@@ -3,12 +3,14 @@ package com.tencent.supersonic.chat.server.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.github.pagehelper.PageHelper; import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo; import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus; import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter; import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryUpdateReq; import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryUpdateReq;
import com.tencent.supersonic.chat.api.pojo.request.PageMemoryReq; import com.tencent.supersonic.chat.api.pojo.request.PageMemoryReq;
import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO; import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
import com.tencent.supersonic.chat.server.persistence.repository.ChatMemoryRepository; import com.tencent.supersonic.chat.server.persistence.repository.ChatMemoryRepository;
import com.tencent.supersonic.chat.server.pojo.ChatMemory;
import com.tencent.supersonic.chat.server.service.MemoryService; import com.tencent.supersonic.chat.server.service.MemoryService;
import com.tencent.supersonic.common.config.EmbeddingConfig; import com.tencent.supersonic.common.config.EmbeddingConfig;
import com.tencent.supersonic.common.pojo.Text2SQLExemplar; import com.tencent.supersonic.common.pojo.Text2SQLExemplar;
@@ -16,12 +18,15 @@ import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.service.ExemplarService; import com.tencent.supersonic.common.service.ExemplarService;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Service @Service
public class MemoryServiceImpl implements MemoryService { public class MemoryServiceImpl implements MemoryService {
@@ -36,20 +41,22 @@ public class MemoryServiceImpl implements MemoryService {
private EmbeddingConfig embeddingConfig; private EmbeddingConfig embeddingConfig;
@Override @Override
public void createMemory(ChatMemoryDO memory) { public void createMemory(ChatMemory memory) {
// if an existing enabled memory has the same question, just skip // if an existing enabled memory has the same question, just skip
List<ChatMemoryDO> memories = List<ChatMemory> memories =
getMemories(ChatMemoryFilter.builder().agentId(memory.getAgentId()) getMemories(ChatMemoryFilter.builder().agentId(memory.getAgentId())
.question(memory.getQuestion()).status(MemoryStatus.ENABLED).build()); .question(memory.getQuestion()).status(MemoryStatus.ENABLED).build());
if (memories.size() == 0) { if (memories.isEmpty()) {
chatMemoryRepository.createMemory(memory); ChatMemoryDO memoryDO = getMemoryDO(memory);
chatMemoryRepository.createMemory(memoryDO);
} }
} }
@Override @Override
public void updateMemory(ChatMemoryUpdateReq chatMemoryUpdateReq, User user) { public void updateMemory(ChatMemoryUpdateReq chatMemoryUpdateReq, User user) {
ChatMemoryDO chatMemoryDO = chatMemoryRepository.getMemory(chatMemoryUpdateReq.getId()); ChatMemoryDO chatMemoryDO = chatMemoryRepository.getMemory(chatMemoryUpdateReq.getId());
boolean hadEnabled = MemoryStatus.ENABLED.equals(chatMemoryDO.getStatus()); boolean hadEnabled =
MemoryStatus.ENABLED.toString().equals(chatMemoryDO.getStatus().trim());
chatMemoryDO.setUpdatedBy(user.getName()); chatMemoryDO.setUpdatedBy(user.getName());
chatMemoryDO.setUpdatedAt(new Date()); chatMemoryDO.setUpdatedAt(new Date());
BeanMapper.mapper(chatMemoryUpdateReq, chatMemoryDO); BeanMapper.mapper(chatMemoryUpdateReq, chatMemoryDO);
@@ -58,12 +65,12 @@ public class MemoryServiceImpl implements MemoryService {
} else if (MemoryStatus.DISABLED.equals(chatMemoryUpdateReq.getStatus()) && hadEnabled) { } else if (MemoryStatus.DISABLED.equals(chatMemoryUpdateReq.getStatus()) && hadEnabled) {
disableMemory(chatMemoryDO); disableMemory(chatMemoryDO);
} }
updateMemory(chatMemoryDO); chatMemoryRepository.updateMemory(chatMemoryDO);
} }
@Override @Override
public void updateMemory(ChatMemoryDO memory) { public void updateMemory(ChatMemory memory) {
chatMemoryRepository.updateMemory(memory); chatMemoryRepository.updateMemory(getMemoryDO(memory));
} }
@Override @Override
@@ -72,7 +79,7 @@ public class MemoryServiceImpl implements MemoryService {
} }
@Override @Override
public PageInfo<ChatMemoryDO> pageMemories(PageMemoryReq pageMemoryReq) { public PageInfo<ChatMemory> pageMemories(PageMemoryReq pageMemoryReq) {
ChatMemoryFilter chatMemoryFilter = pageMemoryReq.getChatMemoryFilter(); ChatMemoryFilter chatMemoryFilter = pageMemoryReq.getChatMemoryFilter();
chatMemoryFilter.setSort(pageMemoryReq.getSort()); chatMemoryFilter.setSort(pageMemoryReq.getSort());
chatMemoryFilter.setOrderCondition(pageMemoryReq.getOrderCondition()); chatMemoryFilter.setOrderCondition(pageMemoryReq.getOrderCondition());
@@ -81,7 +88,7 @@ public class MemoryServiceImpl implements MemoryService {
} }
@Override @Override
public List<ChatMemoryDO> getMemories(ChatMemoryFilter chatMemoryFilter) { public List<ChatMemory> getMemories(ChatMemoryFilter chatMemoryFilter) {
QueryWrapper<ChatMemoryDO> queryWrapper = new QueryWrapper<>(); QueryWrapper<ChatMemoryDO> queryWrapper = new QueryWrapper<>();
if (chatMemoryFilter.getAgentId() != null) { if (chatMemoryFilter.getAgentId() != null) {
queryWrapper.lambda().eq(ChatMemoryDO::getAgentId, chatMemoryFilter.getAgentId()); queryWrapper.lambda().eq(ChatMemoryDO::getAgentId, chatMemoryFilter.getAgentId());
@@ -109,32 +116,52 @@ public class MemoryServiceImpl implements MemoryService {
queryWrapper.orderBy(true, chatMemoryFilter.isAsc(), queryWrapper.orderBy(true, chatMemoryFilter.isAsc(),
chatMemoryFilter.getOrderCondition()); chatMemoryFilter.getOrderCondition());
} }
return chatMemoryRepository.getMemories(queryWrapper); List<ChatMemoryDO> chatMemoryDOS = chatMemoryRepository.getMemories(queryWrapper);
return chatMemoryDOS.stream().map(this::getMemory).collect(Collectors.toList());
} }
@Override private void enableMemory(ChatMemoryDO memory) {
public List<ChatMemoryDO> getMemoriesForLlmReview() { memory.setStatus(MemoryStatus.ENABLED.toString());
QueryWrapper<ChatMemoryDO> queryWrapper = new QueryWrapper<>();
queryWrapper.lambda().eq(ChatMemoryDO::getStatus, MemoryStatus.PENDING)
.isNull(ChatMemoryDO::getLlmReviewRet);
return chatMemoryRepository.getMemories(queryWrapper);
}
@Override
public void enableMemory(ChatMemoryDO memory) {
memory.setStatus(MemoryStatus.ENABLED);
exemplarService.storeExemplar(embeddingConfig.getMemoryCollectionName(memory.getAgentId()), exemplarService.storeExemplar(embeddingConfig.getMemoryCollectionName(memory.getAgentId()),
Text2SQLExemplar.builder().question(memory.getQuestion()) Text2SQLExemplar.builder().question(memory.getQuestion())
.sideInfo(memory.getSideInfo()).dbSchema(memory.getDbSchema()) .sideInfo(memory.getSideInfo()).dbSchema(memory.getDbSchema())
.sql(memory.getS2sql()).build()); .sql(memory.getS2sql()).build());
} }
@Override private void disableMemory(ChatMemoryDO memory) {
public void disableMemory(ChatMemoryDO memory) { memory.setStatus(MemoryStatus.DISABLED.toString());
memory.setStatus(MemoryStatus.DISABLED);
exemplarService.removeExemplar(embeddingConfig.getMemoryCollectionName(memory.getAgentId()), exemplarService.removeExemplar(embeddingConfig.getMemoryCollectionName(memory.getAgentId()),
Text2SQLExemplar.builder().question(memory.getQuestion()) Text2SQLExemplar.builder().question(memory.getQuestion())
.sideInfo(memory.getSideInfo()).dbSchema(memory.getDbSchema()) .sideInfo(memory.getSideInfo()).dbSchema(memory.getDbSchema())
.sql(memory.getS2sql()).build()); .sql(memory.getS2sql()).build());
} }
private ChatMemoryDO getMemoryDO(ChatMemory memory) {
ChatMemoryDO memoryDO = new ChatMemoryDO();
BeanUtils.copyProperties(memory, memoryDO);
memoryDO.setStatus(memory.getStatus().toString().trim());
if (Objects.nonNull(memory.getHumanReviewRet())) {
memoryDO.setHumanReviewRet(memory.getHumanReviewRet().toString().trim());
}
if (Objects.nonNull(memory.getLlmReviewRet())) {
memoryDO.setLlmReviewRet(memory.getLlmReviewRet().toString().trim());
}
return memoryDO;
}
private ChatMemory getMemory(ChatMemoryDO memoryDO) {
ChatMemory memory = new ChatMemory();
BeanUtils.copyProperties(memoryDO, memory);
memory.setStatus(MemoryStatus.valueOf(memoryDO.getStatus().trim()));
if (Objects.nonNull(memoryDO.getHumanReviewRet())) {
memory.setHumanReviewRet(
MemoryReviewResult.valueOf(memoryDO.getHumanReviewRet().trim()));
}
if (Objects.nonNull(memoryDO.getLlmReviewRet())) {
memory.setLlmReviewRet(MemoryReviewResult.valueOf(memoryDO.getLlmReviewRet().trim()));
}
return memory;
}
} }
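The MemoryServiceImpl hunk above keeps enums inside the ChatMemory domain object and converts them to plain strings only at the persistence boundary. A condensed sketch of that pair of converters, simplified from the diff's getMemoryDO/getMemory helpers (BeanUtils is Spring's org.springframework.beans.BeanUtils; the diff itself also maps humanReviewRet and uses toString() rather than name(), which is equivalent for enums):

private ChatMemoryDO getMemoryDO(ChatMemory memory) {
    ChatMemoryDO memoryDO = new ChatMemoryDO();
    BeanUtils.copyProperties(memory, memoryDO);        // copies same-named, same-typed fields
    memoryDO.setStatus(memory.getStatus().name());     // enum -> String column
    if (Objects.nonNull(memory.getLlmReviewRet())) {
        memoryDO.setLlmReviewRet(memory.getLlmReviewRet().name());
    }
    return memoryDO;
}

private ChatMemory getMemory(ChatMemoryDO memoryDO) {
    ChatMemory memory = new ChatMemory();
    BeanUtils.copyProperties(memoryDO, memory);
    memory.setStatus(MemoryStatus.valueOf(memoryDO.getStatus().trim()));   // String -> enum, trim CHAR padding
    if (Objects.nonNull(memoryDO.getLlmReviewRet())) {
        memory.setLlmReviewRet(MemoryReviewResult.valueOf(memoryDO.getLlmReviewRet().trim()));
    }
    return memory;
}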

View File

@@ -19,19 +19,6 @@ public class Term {
         this.nature = nature;
     }

-    public Term(String word, Nature nature, int offset) {
-        this.word = word;
-        this.nature = nature;
-        this.offset = offset;
-    }
-
-    public Term(String word, Nature nature, int offset, int frequency) {
-        this.word = word;
-        this.nature = nature;
-        this.offset = offset;
-        this.frequency = frequency;
-    }
-
     public int length() {
         return this.word.length();
     }

View File

@@ -38,14 +38,14 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
             new Parameter("s2.embedding.store.timeout", "60", "超时时间(秒)", "", "number", MODULE_NAME);

     public static final Parameter EMBEDDING_STORE_DIMENSION =
-            new Parameter("s2.embedding.store.dimension", "", "", "", "number", MODULE_NAME, null,
-                    getDimensionDependency());
+            new Parameter("s2.embedding.store.dimension", "", "向量维", "", "number", MODULE_NAME,
+                    null, getDimensionDependency());

     public static final Parameter EMBEDDING_STORE_DATABASE_NAME =
             new Parameter("s2.embedding.store.databaseName", "", "DatabaseName", "", "string",
                     MODULE_NAME, null, getDatabaseNameDependency());

-    public static final Parameter EMBEDDING_STORE_POST = new Parameter("s2.embedding.store.post",
-            "", "端口", "", "number", MODULE_NAME, null, getPostDependency());
+    public static final Parameter EMBEDDING_STORE_POST = new Parameter("s2.embedding.store.port",
+            "", "端口", "", "number", MODULE_NAME, null, getPortDependency());

     public static final Parameter EMBEDDING_STORE_USER = new Parameter("s2.embedding.store.user",
             "", "用户名", "", "string", MODULE_NAME, null, getUserDependency());
@@ -101,10 +101,8 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
     private static List<Parameter.Dependency> getApiKeyDependency() {
         return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
-                Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
-                        EmbeddingStoreType.PGVECTOR.name()),
-                ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), DEMO,
-                        EmbeddingStoreType.PGVECTOR.name(), DEMO));
+                Lists.newArrayList(EmbeddingStoreType.MILVUS.name()),
+                ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), DEMO));
     }

     private static List<Parameter.Dependency> getPathDependency() {
@@ -118,7 +116,7 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
                 Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
                         EmbeddingStoreType.PGVECTOR.name()),
                 ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "384",
-                        EmbeddingStoreType.PGVECTOR.name(), "768"));
+                        EmbeddingStoreType.PGVECTOR.name(), "512"));
     }

     private static List<Parameter.Dependency> getDatabaseNameDependency() {
@@ -129,7 +127,7 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
                 EmbeddingStoreType.PGVECTOR.name(), "postgres"));
     }

-    private static List<Parameter.Dependency> getPostDependency() {
+    private static List<Parameter.Dependency> getPortDependency() {
         return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
                 Lists.newArrayList(EmbeddingStoreType.PGVECTOR.name()),
                 ImmutableMap.of(EmbeddingStoreType.PGVECTOR.name(), "54333"));
@@ -140,12 +138,14 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
                 Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
                         EmbeddingStoreType.PGVECTOR.name()),
                 ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus",
-                        EmbeddingStoreType.PGVECTOR.name(), "pgvector"));
+                        EmbeddingStoreType.PGVECTOR.name(), "postgres"));
     }

     private static List<Parameter.Dependency> getPasswordDependency() {
         return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
-                Lists.newArrayList(EmbeddingStoreType.MILVUS.name()),
-                ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus"));
+                Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
+                        EmbeddingStoreType.PGVECTOR.name()),
+                ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus",
+                        EmbeddingStoreType.PGVECTOR.name(), "postgres"));
     }
 }
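The PGVECTOR default dimension drops from 768 to 512 here, matching the "Change default vector dimension to 512" item in the commit message. Because a pgvector column is created with a fixed dimension, a mismatch with the embedding model's output size only shows up at insert time; a defensive check along these lines can surface it earlier (getDimension-style access on the store config is an assumption for illustration, not the project's API):

// Hedged sketch: fail fast if the configured store dimension does not match the model.
static void checkDimension(dev.langchain4j.model.embedding.EmbeddingModel model,
        String configuredDimension) {
    int modelDimension = model.dimension();                 // e.g. 512 for the bundled model
    int storeDimension = Integer.parseInt(configuredDimension);
    if (modelDimension != storeDimension) {
        throw new IllegalStateException("Embedding dimension mismatch: model="
                + modelDimension + ", store=" + storeDimension);
    }
}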

View File

@@ -8,7 +8,6 @@ import lombok.Data;
 @Builder
 public class DataItem {

-    /** This field uses an underscore (_) at the end. */
     private String id;

     private String bizName;
@@ -19,9 +18,10 @@
     private TypeEnums type;

-    /** This field uses an underscore (_) at the end. */
     private String modelId;

+    private String domainId;
+
     private String defaultAgg;

     public String getNewName() {

View File

@@ -1,5 +1,6 @@
 package com.tencent.supersonic.common.pojo;

+import com.google.common.collect.Lists;
 import lombok.Data;
 import java.util.List;
@@ -18,5 +19,5 @@ public class ModelRela extends RecordInfo {
     // left join, inner join, right join, outer join
     private String joinType;

-    private List<JoinCondition> joinConditions;
+    private List<JoinCondition> joinConditions = Lists.newArrayList();
 }

View File

@@ -1,5 +1,5 @@
 package com.tencent.supersonic.common.pojo.enums;

 public enum TypeEnums {
-    METRIC, DIMENSION, TAG_OBJECT, TAG, DOMAIN, DATASET, MODEL, UNKNOWN
+    METRIC, DIMENSION, TAG, DOMAIN, DATASET, MODEL, UNKNOWN
 }

View File

@@ -26,7 +26,7 @@ public class PgvectorEmbeddingStoreFactory extends BaseEmbeddingStoreFactory {
         embeddingStore.setPort(storeConfig.getPost());
         embeddingStore.setDatabase(storeConfig.getDatabaseName());
         embeddingStore.setUser(storeConfig.getUser());
-        embeddingStore.setPassword(storeConfig.getApiKey());
+        embeddingStore.setPassword(storeConfig.getPassword());
         return embeddingStore;
     }

View File

@@ -1,6 +1,7 @@
 package dev.langchain4j.store.embedding;

 import com.alibaba.fastjson.JSONObject;
+import com.tencent.supersonic.common.pojo.Constants;
 import com.tencent.supersonic.common.pojo.DataItem;
 import dev.langchain4j.data.document.Metadata;
 import dev.langchain4j.data.segment.TextSegment;
@@ -17,10 +18,18 @@ public class TextSegmentConvert {
     public static final String QUERY_ID = "queryId";

     public static List<TextSegment> convertToEmbedding(List<DataItem> dataItems) {
-        return dataItems.stream().map(dataItem -> {
-            Map meta = JSONObject.parseObject(JSONObject.toJSONString(dataItem), Map.class);
-            TextSegment textSegment = TextSegment.from(dataItem.getName(), new Metadata(meta));
-            addQueryId(textSegment, dataItem.getId() + dataItem.getType().name().toLowerCase());
+        return dataItems.stream().map(item -> {
+            // suffix with underscore to avoid embedding issue
+            DataItem newItem = DataItem.builder().domainId(item.getDomainId())
+                    .bizName(item.getBizName()).type(item.getType()).newName(item.getNewName())
+                    .defaultAgg(item.getDefaultAgg()).name(item.getName())
+                    .id(item.getId() + Constants.UNDERLINE)
+                    .modelId(item.getModelId() + Constants.UNDERLINE)
+                    .domainId(item.getDomainId() + Constants.UNDERLINE).build();
+            Map meta = JSONObject.parseObject(JSONObject.toJSONString(newItem), Map.class);
+            TextSegment textSegment = TextSegment.from(newItem.getName(), new Metadata(meta));
+            addQueryId(textSegment, newItem.getId() + newItem.getType().name().toLowerCase());
             return textSegment;
         }).collect(Collectors.toList());
     }
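Suffixing Constants.UNDERLINE (assumed here to be "_") keeps ID-like values string-typed when the metadata map round-trips through JSON into the vector store, which appears to be the "embedding issue" the new comment guards against. A small usage sketch with made-up field values:

DataItem metric = DataItem.builder()
        .id("42").modelId("7").domainId("3")
        .name("pv").bizName("page_views")
        .type(TypeEnums.METRIC)
        .build();

List<TextSegment> segments = TextSegmentConvert.convertToEmbedding(List.of(metric));
// segment text is the display name; metadata carries id "42_", modelId "7_", domainId "3_"
System.out.println(segments.get(0).text());
System.out.println(segments.get(0).metadata());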

View File

@@ -7,14 +7,13 @@ WORKDIR /usr/src/app
 # Argument to pass in the supersonic version at build time
 ARG SUPERSONIC_VERSION

-# Install necessary packages, including MySQL client
-RUN apt-get update && \
-    apt-get install -y default-mysql-client unzip && \
-    rm -rf /var/lib/apt/lists/*
+RUN apt-get update
+
+# Install necessary packages, including Postgres client
+RUN apt-get update && apt-get install -y postgresql-client

 # Install the vim editor.
-RUN apt-get update && \
-    apt-get install -y vim && \
+RUN apt-get update && apt-get install -y vim && \
     rm -rf /var/lib/apt/lists/*

 # Update the package list and install iputils-ping.
@@ -40,4 +39,4 @@ WORKDIR /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION}
 EXPOSE 9080
 # Command to run the supersonic daemon
 RUN chmod +x bin/supersonic-daemon.sh
-CMD ["bash", "-c", "bin/supersonic-daemon.sh restart standalone prd && tail -f /dev/null"]
+CMD ["bash", "-c", "bin/supersonic-daemon.sh restart standalone docker && tail -f /dev/null"]

docker/DockerfileS2 (new file, 32 lines)
View File

@@ -0,0 +1,32 @@
# Use an official OpenJDK runtime as a parent image
FROM supersonicbi/supersonic:0.9.8
# Set the working directory in the container
WORKDIR /usr/src/app
# Argument to pass in the supersonic version at build time
ARG SUPERSONIC_VERSION
# Install necessary packages, including Postgres client
RUN apt-get install -y postgresql-client
RUN rm /usr/src/app/supersonic-standalone-latest
# Copy the supersonic standalone zip file into the container
COPY assembly/build/supersonic-standalone-${SUPERSONIC_VERSION}.zip .
# Unzip the supersonic standalone zip
RUN unzip supersonic-standalone-${SUPERSONIC_VERSION}.zip && \
rm supersonic-standalone-${SUPERSONIC_VERSION}.zip
# Create a symbolic link to the supersonic installation directory
RUN ln -s /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION} /usr/src/app/supersonic-standalone-latest
# Set the working directory to the supersonic installation directory
WORKDIR /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION}
# Expose the default port
EXPOSE 9080
# Command to run the supersonic daemon
RUN chmod +x bin/supersonic-daemon.sh
CMD ["bash", "-c", "bin/supersonic-daemon.sh restart standalone docker && tail -f /dev/null"]

View File

@@ -0,0 +1,3 @@
#!/usr/bin/env bash
SUPERSONIC_VERSION=0.9.10-SNAPSHOT docker-compose -f docker-compose.yml -p supersonic up

View File

@@ -1,51 +1,28 @@
services: services:
chroma: postgres:
image: chromadb/chroma:0.5.3 image: pgvector/pgvector:pg17
privileged: true privileged: true
container_name: supersonic_chroma container_name: supersonic_postgres
ports:
- "8000:8000"
volumes:
- chroma_data:/chroma
networks:
- supersonic_network
dns:
- 114.114.114.114
- 8.8.8.8
- 8.8.4.4
healthcheck:
test: ["CMD", "curl", "http://0.0.0.0:8000"]
interval: 10s
timeout: 5s
retries: 10
mysql:
image: mysql:8.0
privileged: true
container_name: supersonic_mysql
environment: environment:
LANG: 'C.UTF-8' # 设置环境变量 LANG: 'C.UTF-8' # 设置环境变量
MYSQL_ROOT_PASSWORD: root_password POSTGRES_ROOT_PASSWORD: root_password
MYSQL_DATABASE: supersonic_db POSTGRES_DATABASE: postgres
MYSQL_USER: supersonic_user POSTGRES_USER: supersonic_user
MYSQL_PASSWORD: supersonic_password POSTGRES_PASSWORD: supersonic_password
ports: ports:
- "13306:3306" - "15432:5432"
volumes: volumes:
- mysql_data:/var/lib/mysql - postgres_data:/var/lib/postgresql
networks: networks:
- supersonic_network - supersonic_network
dns: dns:
- 114.114.114.114 - 114.114.114.114
- 8.8.8.8 - 8.8.8.8
- 8.8.4.4 - 8.8.4.4
depends_on:
chroma:
condition: service_healthy
healthcheck: healthcheck:
test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] test: ["CMD-SHELL", "sh -c 'pg_isready -U supersonic_user -d postgres'"]
interval: 10s interval: 30s
timeout: 5s timeout: 10s
retries: 5 retries: 5
db_init: db_init:
@@ -53,22 +30,21 @@ services:
privileged: true privileged: true
container_name: supersonic_db_init container_name: supersonic_db_init
depends_on: depends_on:
mysql: postgres:
condition: service_healthy condition: service_healthy
networks: networks:
- supersonic_network - supersonic_network
command: > command: >
sh -c " sh -c "
sleep 15 && if ! PGPASSWORD=supersonic_password psql -h supersonic_postgres -U supersonic_user -d postgres -c 'select * from s2_database limit 1' > /dev/null;
if ! mysql -h supersonic_mysql -usupersonic_user -psupersonic_password -e 'use supersonic_db; show tables;' | grep -q 's2_database'; then then
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-mysql.sql && PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-postgres.sql
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-mysql-demo.sql && PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-postgres-demo.sql
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-mysql.sql && PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-postgres.sql
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-mysql-demo.sql PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-postgres-demo.sql
else else
echo 'Database already initialized.' echo 'Database already initialized.'
fi fi"
"
dns: dns:
- 114.114.114.114 - 114.114.114.114
- 8.8.8.8 - 8.8.8.8
@@ -79,17 +55,14 @@ services:
privileged: true privileged: true
container_name: supersonic_standalone container_name: supersonic_standalone
environment: environment:
DB_HOST: supersonic_mysql DB_HOST: supersonic_postgres
DB_NAME: supersonic_db DB_NAME: postgres
DB_USERNAME: supersonic_user DB_USERNAME: supersonic_user
DB_PASSWORD: supersonic_password DB_PASSWORD: supersonic_password
CHROMA_HOST: supersonic_chroma
ports: ports:
- "9080:9080" - "9080:9080"
depends_on: depends_on:
chroma: postgres:
condition: service_healthy
mysql:
condition: service_healthy condition: service_healthy
db_init: db_init:
condition: service_completed_successfully condition: service_completed_successfully
@@ -112,8 +85,7 @@ services:
# propagation: rprivate # propagation: rprivate
# create_host_path: true # create_host_path: true
volumes: volumes:
mysql_data: postgres_data:
chroma_data:
supersonic_data: supersonic_data:
networks: networks:
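Side note on the compose changes above: the standalone container now receives DB_HOST=supersonic_postgres, DB_NAME=postgres, DB_USERNAME and DB_PASSWORD instead of the MySQL settings. A minimal sketch (not part of this commit; the JDBC driver, URL format and in-network port 5432 are assumptions) of how those variables translate into a PostgreSQL connection:

import java.sql.Connection;
import java.sql.DriverManager;

public class PostgresEnvConnectionSketch {
    public static void main(String[] args) throws Exception {
        // values injected by docker-compose for the standalone service
        String host = System.getenv().getOrDefault("DB_HOST", "supersonic_postgres");
        String db = System.getenv().getOrDefault("DB_NAME", "postgres");
        String user = System.getenv().getOrDefault("DB_USERNAME", "supersonic_user");
        String password = System.getenv().getOrDefault("DB_PASSWORD", "supersonic_password");
        // 5432 is the port inside the compose network; 15432 is only the host-side mapping
        String url = "jdbc:postgresql://" + host + ":5432/" + db;
        try (Connection conn = DriverManager.getConnection(url, user, password)) {
            System.out.println("connected: " + !conn.isClosed());
        }
    }
}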

View File

@@ -15,7 +15,7 @@ public class ColumnSchema {
private FieldType filedType; private FieldType filedType;
private AggOperatorEnum agg; private AggOperatorEnum agg = AggOperatorEnum.SUM;
private String name; private String name;

View File

@@ -9,7 +9,7 @@ import lombok.NoArgsConstructor;
@Data @Data
@AllArgsConstructor @AllArgsConstructor
@NoArgsConstructor @NoArgsConstructor
public class Dim { public class Dimension {
private String name; private String name;
@@ -27,16 +27,14 @@ public class Dim {
private String description; private String description;
private int isTag; public Dimension(String name, String bizName, DimensionType type, Integer isCreateDimension) {
public Dim(String name, String bizName, DimensionType type, Integer isCreateDimension) {
this.name = name; this.name = name;
this.type = type; this.type = type;
this.isCreateDimension = isCreateDimension; this.isCreateDimension = isCreateDimension;
this.bizName = bizName; this.bizName = bizName;
} }
public Dim(String name, String bizName, DimensionType type, Integer isCreateDimension, public Dimension(String name, String bizName, DimensionType type, Integer isCreateDimension,
String expr, String dateFormat, DimensionTimeTypeParams typeParams) { String expr, String dateFormat, DimensionTimeTypeParams typeParams) {
this.name = name; this.name = name;
this.type = type; this.type = type;
@@ -47,8 +45,8 @@ public class Dim {
this.bizName = bizName; this.bizName = bizName;
} }
public static Dim getDefault() { public static Dimension getDefault() {
return new Dim("数据日期", "imp_date", DimensionType.partition_time, 0, "imp_date", return new Dimension("数据日期", "imp_date", DimensionType.partition_time, 0, "imp_date",
Constants.DAY_FORMAT, new DimensionTimeTypeParams("false", "day")); Constants.DAY_FORMAT, new DimensionTimeTypeParams("false", "day"));
} }
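To make the Dim -> Dimension rename concrete, a small hedged sketch using only names visible in this diff (Dimension, DimensionType, DimensionTimeTypeParams, Constants.DAY_FORMAT and their packages); the class wrapper and variable names are illustrative:

import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;

public class DimensionRenameSketch {
    public static void main(String[] args) {
        // the renamed factory still builds the default partition-time dimension on imp_date
        Dimension defaultTime = Dimension.getDefault();
        // equivalent construction via the renamed seven-argument constructor
        Dimension impDate = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 0,
                "imp_date", Constants.DAY_FORMAT, new DimensionTimeTypeParams("false", "day"));
        System.out.println(defaultTime.getBizName() + " / " + impDate.getBizName());
    }
}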

View File

@@ -24,7 +24,7 @@ public class ModelDetail {
private List<Identify> identifiers = Lists.newArrayList(); private List<Identify> identifiers = Lists.newArrayList();
private List<Dim> dimensions = Lists.newArrayList(); private List<Dimension> dimensions = Lists.newArrayList();
private List<Measure> measures = Lists.newArrayList(); private List<Measure> measures = Lists.newArrayList();
@@ -39,7 +39,7 @@ public class ModelDetail {
return sqlQuery; return sqlQuery;
} }
public List<Dim> filterTimeDims() { public List<Dimension> filterTimeDims() {
if (CollectionUtils.isEmpty(dimensions)) { if (CollectionUtils.isEmpty(dimensions)) {
return Lists.newArrayList(); return Lists.newArrayList();
} }

View File

@@ -1,5 +1,5 @@
package com.tencent.supersonic.headless.api.pojo.enums; package com.tencent.supersonic.headless.api.pojo.enums;
public enum FieldType { public enum FieldType {
primary_key, foreign_key, partition_time, time, dimension, measure; primary_key, foreign_key, partition_time, time, categorical, measure;
} }

View File

@@ -32,8 +32,6 @@ public class DimensionReq extends SchemaItem {
private DataTypeEnums dataType; private DataTypeEnums dataType;
private int isTag;
private Map<String, Object> ext; private Map<String, Object> ext;
private DimensionTimeTypeParams typeParams; private DimensionTimeTypeParams typeParams;

View File

@@ -9,6 +9,10 @@ import java.util.List;
@Data @Data
public class ModelBuildReq { public class ModelBuildReq {
private String name;
private String bizName;
private Long databaseId; private Long databaseId;
private Long domainId; private Long domainId;

View File

@@ -211,7 +211,9 @@ public class QueryStructReq extends SemanticQueryReq {
SelectItem selectExpressionItem = new SelectItem(function); SelectItem selectExpressionItem = new SelectItem(function);
String alias = String alias =
StringUtils.isNotBlank(aggregator.getAlias()) ? aggregator.getAlias() : columnName; StringUtils.isNotBlank(aggregator.getAlias()) ? aggregator.getAlias() : columnName;
if (!alias.equals(columnName)) {
selectExpressionItem.setAlias(new Alias(alias)); selectExpressionItem.setAlias(new Alias(alias));
}
return selectExpressionItem; return selectExpressionItem;
} }

View File

@@ -18,6 +18,8 @@ public class DimensionResp extends SchemaItem {
private Long modelId; private Long modelId;
private Long domainId;
private DimensionType type; private DimensionType type;
private String expr; private String expr;

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.headless.api.pojo.response; package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension; import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.Field; import com.tencent.supersonic.headless.api.pojo.Field;
import com.tencent.supersonic.headless.api.pojo.Identify; import com.tencent.supersonic.headless.api.pojo.Identify;
@@ -62,7 +62,7 @@ public class ModelResp extends SchemaItem {
return isOpen != null && isOpen == 1; return isOpen != null && isOpen == 1;
} }
public List<Dim> getTimeDimension() { public List<Dimension> getTimeDimension() {
if (modelDetail == null) { if (modelDetail == null) {
return Lists.newArrayList(); return Lists.newArrayList();
} }

View File

@@ -19,6 +19,7 @@ public class DictWord {
private String word; private String word;
private String nature; private String nature;
private String natureWithFrequency; private String natureWithFrequency;
private String alias;
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {

View File

@@ -1,11 +1,14 @@
package com.tencent.supersonic.headless.chat.knowledge; package com.tencent.supersonic.headless.chat.knowledge;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.tencent.supersonic.common.pojo.enums.DictWordType; import com.tencent.supersonic.common.pojo.enums.DictWordType;
import com.tencent.supersonic.headless.api.pojo.response.S2Term; import com.tencent.supersonic.headless.api.pojo.response.S2Term;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper; import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@@ -14,6 +17,31 @@ import java.util.stream.Collectors;
@Service @Service
@Slf4j @Slf4j
public class KnowledgeBaseService { public class KnowledgeBaseService {
private static volatile Map<Long, List<DictWord>> dimValueAliasMap = new HashMap<>();
public static Map<Long, List<DictWord>> getDimValueAlias() {
return dimValueAliasMap;
}
public static List<DictWord> addDimValueAlias(Long dimId, List<DictWord> newWords) {
List<DictWord> dimValueAlias =
dimValueAliasMap.containsKey(dimId) ? dimValueAliasMap.get(dimId)
: new ArrayList<>();
Set<String> wordSet =
dimValueAlias
.stream().map(word -> String.format("%s_%s_%s",
word.getNatureWithFrequency(), word.getWord(), word.getAlias()))
.collect(Collectors.toSet());
for (DictWord dictWord : newWords) {
String key = String.format("%s_%s_%s", dictWord.getNatureWithFrequency(),
dictWord.getWord(), dictWord.getAlias());
if (!wordSet.contains(key)) {
dimValueAlias.add(dictWord);
}
}
dimValueAliasMap.put(dimId, dimValueAlias);
return dimValueAlias;
}
public void updateSemanticKnowledge(List<DictWord> natures) { public void updateSemanticKnowledge(List<DictWord> natures) {
@@ -41,6 +69,11 @@ public class KnowledgeBaseService {
} }
// 2. update online knowledge // 2. update online knowledge
if (CollectionUtils.isNotEmpty(dimValueAliasMap)) {
for (Long dimId : dimValueAliasMap.keySet()) {
natures.addAll(dimValueAliasMap.get(dimId));
}
}
updateOnlineKnowledge(natures); updateOnlineKnowledge(natures);
} }
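The new addDimValueAlias deduplicates per dimension on the (natureWithFrequency, word, alias) triple before the aliases are merged into the online knowledge. A hedged illustration of that contract (the dimension id, nature string and values are made up):

import com.tencent.supersonic.headless.chat.knowledge.DictWord;
import com.tencent.supersonic.headless.chat.knowledge.KnowledgeBaseService;
import java.util.Arrays;

public class DimValueAliasSketch {
    public static void main(String[] args) {
        DictWord alias = new DictWord();
        alias.setWord("北京");                       // canonical dimension value
        alias.setAlias("beijing");                   // lowercase alias added by MultiCustomDictionary
        alias.setNatureWithFrequency("_1_2 100000"); // illustrative nature + frequency

        KnowledgeBaseService.addDimValueAlias(2L, Arrays.asList(alias));
        KnowledgeBaseService.addDimValueAlias(2L, Arrays.asList(alias)); // identical entry is skipped
        System.out.println(KnowledgeBaseService.getDimValueAlias().get(2L).size()); // 1
    }
}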

View File

@@ -12,6 +12,8 @@ import com.hankcs.hanlp.dictionary.other.CharTable;
import com.hankcs.hanlp.seg.common.Term; import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.utility.LexiconUtility; import com.hankcs.hanlp.utility.LexiconUtility;
import com.hankcs.hanlp.utility.TextUtility; import com.hankcs.hanlp.utility.TextUtility;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.DictWordType;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper; import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import java.io.BufferedOutputStream; import java.io.BufferedOutputStream;
@@ -103,7 +105,22 @@ public class MultiCustomDictionary extends DynamicCustomDictionary {
String word = getWordBySpace(param[0]); String word = getWordBySpace(param[0]);
if (isLetters) { if (isLetters) {
original = word; original = word;
word = word.toLowerCase(); // word = word.toLowerCase();
// register a lowercase alias for the original word
if (!original.equals(word.toLowerCase())) {
DictWord dictWord = new DictWord();
String nature = param[1];
dictWord.setNatureWithFrequency(
String.format("%s " + Constants.DEFAULT_FREQUENCY, nature));
dictWord.setWord(word);
dictWord.setAlias(word.toLowerCase());
String[] split = nature.split(DictWordType.NATURE_SPILT);
if (split.length >= 2) {
Long dimId = Long.parseLong(
nature.split(DictWordType.NATURE_SPILT)[split.length - 1]);
KnowledgeBaseService.addDimValueAlias(dimId, Arrays.asList(dictWord));
}
}
} }
if (natureCount == 0) { if (natureCount == 0) {
attribute = new CoreDictionary.Attribute(defaultNature); attribute = new CoreDictionary.Attribute(defaultNature);

View File

@@ -8,12 +8,15 @@ import com.tencent.supersonic.headless.api.pojo.SchemaMapInfo;
import com.tencent.supersonic.headless.api.pojo.response.S2Term; import com.tencent.supersonic.headless.api.pojo.response.S2Term;
import com.tencent.supersonic.headless.chat.ChatQueryContext; import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.knowledge.DatabaseMapResult; import com.tencent.supersonic.headless.chat.knowledge.DatabaseMapResult;
import com.tencent.supersonic.headless.chat.knowledge.DictWord;
import com.tencent.supersonic.headless.chat.knowledge.HanlpMapResult; import com.tencent.supersonic.headless.chat.knowledge.HanlpMapResult;
import com.tencent.supersonic.headless.chat.knowledge.KnowledgeBaseService;
import com.tencent.supersonic.headless.chat.knowledge.builder.BaseWordBuilder; import com.tencent.supersonic.headless.chat.knowledge.builder.BaseWordBuilder;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper; import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper; import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import com.tencent.supersonic.headless.chat.utils.EditDistanceUtils; import com.tencent.supersonic.headless.chat.utils.EditDistanceUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.HashSet; import java.util.HashSet;
@@ -83,12 +86,32 @@ public class KeywordMapper extends BaseMapper {
.element(element).frequency(frequency).word(hanlpMapResult.getName()) .element(element).frequency(frequency).word(hanlpMapResult.getName())
.similarity(hanlpMapResult.getSimilarity()) .similarity(hanlpMapResult.getSimilarity())
.detectWord(hanlpMapResult.getDetectWord()).build(); .detectWord(hanlpMapResult.getDetectWord()).build();
// doDimValueAliasLogic: replace dimension-value aliases with the real dimension values
doDimValueAliasLogic(schemaElementMatch);
addToSchemaMap(chatQueryContext.getMapInfo(), dataSetId, schemaElementMatch); addToSchemaMap(chatQueryContext.getMapInfo(), dataSetId, schemaElementMatch);
} }
} }
} }
private void doDimValueAliasLogic(SchemaElementMatch schemaElementMatch) {
SchemaElement element = schemaElementMatch.getElement();
if (SchemaElementType.VALUE.equals(element.getType())) {
Long dimId = element.getId();
String word = schemaElementMatch.getWord();
Map<Long, List<DictWord>> dimValueAlias = KnowledgeBaseService.getDimValueAlias();
if (Objects.nonNull(dimId) && StringUtils.isNotEmpty(word)
&& dimValueAlias.containsKey(dimId)) {
Map<String, DictWord> aliasAndDictMap = dimValueAlias.get(dimId).stream()
.collect(Collectors.toMap(dictWord -> dictWord.getAlias(),
dictWord -> dictWord, (v1, v2) -> v2));
if (aliasAndDictMap.containsKey(word)) {
String wordTech = aliasAndDictMap.get(word).getWord();
schemaElementMatch.setWord(wordTech);
}
}
}
}
private void convertMapResultToMapInfo(ChatQueryContext chatQueryContext, private void convertMapResultToMapInfo(ChatQueryContext chatQueryContext,
List<DatabaseMapResult> mapResults) { List<DatabaseMapResult> mapResults) {
for (DatabaseMapResult match : mapResults) { for (DatabaseMapResult match : mapResults) {

View File

@@ -0,0 +1,37 @@
package com.tencent.supersonic.headless.chat.mapper;
import com.tencent.supersonic.common.pojo.enums.Text2SQLType;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Slf4j
public class TimeFieldMapper extends BaseMapper {
@Override
public void doMap(ChatQueryContext chatQueryContext) {
if (chatQueryContext.getRequest().getText2SQLType().equals(Text2SQLType.ONLY_RULE)) {
return;
}
Map<Long, DataSetSchema> schemaMap =
chatQueryContext.getSemanticSchema().getDataSetSchemaMap();
for (Map.Entry<Long, DataSetSchema> entry : schemaMap.entrySet()) {
List<SchemaElement> timeDims = entry.getValue().getDimensions().stream()
.filter(dim -> dim.getTimeFormat() != null).collect(Collectors.toList());
for (SchemaElement schemaElement : timeDims) {
chatQueryContext.getMapInfo().getMatchedElements(entry.getKey())
.add(SchemaElementMatch.builder().word(schemaElement.getName())
.element(schemaElement).detectWord(schemaElement.getName())
.similarity(1.0).build());
}
}
}
}

View File

@@ -84,7 +84,7 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
return connection.getMetaData(); return connection.getMetaData();
} }
protected static FieldType classifyColumnType(String typeName) { public FieldType classifyColumnType(String typeName) {
switch (typeName.toUpperCase()) { switch (typeName.toUpperCase()) {
case "INT": case "INT":
case "INTEGER": case "INTEGER":
@@ -101,7 +101,7 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
case "TIMESTAMP": case "TIMESTAMP":
return FieldType.time; return FieldType.time;
default: default:
return FieldType.dimension; return FieldType.categorical;
} }
} }

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.adaptor.db; package com.tencent.supersonic.headless.core.adaptor.db;
import com.tencent.supersonic.headless.api.pojo.DBColumn; import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo; import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
import java.sql.SQLException; import java.sql.SQLException;
@@ -19,4 +20,6 @@ public interface DbAdaptor {
List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName) List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException; throws SQLException;
FieldType classifyColumnType(String typeName);
} }

View File

@@ -114,7 +114,8 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
return dbColumns; return dbColumns;
} }
protected static FieldType classifyColumnType(String typeName) { @Override
public FieldType classifyColumnType(String typeName) {
switch (typeName.toUpperCase()) { switch (typeName.toUpperCase()) {
case "INT": case "INT":
case "INTEGER": case "INTEGER":
@@ -141,7 +142,7 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
case "CHARACTER": case "CHARACTER":
case "UUID": case "UUID":
default: default:
return FieldType.dimension; return FieldType.categorical;
} }
} }
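With classifyColumnType now an instance method on DbAdaptor, callers pick the adaptor for the database type and classify columns through it, as DatabaseServiceImpl does later in this diff. A hedged usage sketch: DbAdaptorFactory.getEngineAdaptor appears in this diff, but its package and the "postgresql" type literal are assumptions here.

import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory; // assumed package

public class ColumnTypeSketch {
    public static void main(String[] args) {
        DbAdaptor adaptor = DbAdaptorFactory.getEngineAdaptor("postgresql"); // illustrative type string
        System.out.println(adaptor.classifyColumnType("TIMESTAMP")); // time
        System.out.println(adaptor.classifyColumnType("UUID"));      // categorical (default branch above)
    }
}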

View File

@@ -61,13 +61,18 @@ public class SqlQueryConverter implements QueryConverter {
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields); List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics = List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList()); metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
AggOption aggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields); Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam(); OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
ontologyQueryParam.getMetrics().addAll(metrics); ontologyQueryParam.getMetrics().addAll(metrics);
ontologyQueryParam.getDimensions().addAll(dimensions); ontologyQueryParam.getDimensions().addAll(dimensions);
ontologyQueryParam.setAggOption(aggOption); AggOption sqlQueryAggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(aggOption)); // if sql query itself has aggregation, ontology query just returns detail
if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) {
ontologyQueryParam.setAggOption(AggOption.NATIVE);
} else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.DEFAULT);
}
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(ontologyQueryParam.getAggOption()));
queryStatement.setOntologyQueryParam(ontologyQueryParam); queryStatement.setOntologyQueryParam(ontologyQueryParam);
generateDerivedMetric(sqlGenerateUtils, queryStatement); generateDerivedMetric(sqlGenerateUtils, queryStatement);

View File

@@ -57,7 +57,11 @@ public class StructQueryConverter implements QueryConverter {
.map(Aggregator::getColumn).collect(Collectors.toList())); .map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQueryParam, null); String where = sqlGenerateUtils.generateWhere(structQueryParam, null);
ontologyQueryParam.setWhere(where); ontologyQueryParam.setWhere(where);
ontologyQueryParam.setAggOption(AggOption.AGGREGATION); if (ontologyQueryParam.getMetrics().isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.NATIVE);
} else {
ontologyQueryParam.setAggOption(AggOption.DEFAULT);
}
ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery()); ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery());
ontologyQueryParam.setOrder(structQueryParam.getOrders().stream() ontologyQueryParam.setOrder(structQueryParam.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection())) .map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))

View File

@@ -26,8 +26,8 @@ public class DataModelNode extends SemanticNode {
sqlTable = dataModel.getSqlQuery(); sqlTable = dataModel.getSqlQuery();
} else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) { } else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) {
if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) { if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
String fullTableName = Arrays.stream(dataModel.getTableQuery().split("\\.")) String fullTableName =
.collect(Collectors.joining(".public.")); String.join(".public.", dataModel.getTableQuery().split("\\."));
sqlTable = "select * from " + fullTableName; sqlTable = "select * from " + fullTableName;
} else { } else {
sqlTable = "select * from " + dataModel.getTableQuery(); sqlTable = "select * from " + dataModel.getTableQuery();
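For the PostgreSQL branch just above, the rewrite inserts the default public schema between database and table. A tiny hedged example of the string handling (the table name is illustrative):

String tableQuery = "supersonic.s2_pv_uv_statis";                        // illustrative "db.table" value
String fullTableName = String.join(".public.", tableQuery.split("\\.")); // "supersonic.public.s2_pv_uv_statis"
String sqlTable = "select * from " + fullTableName;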
@@ -64,7 +64,7 @@ public class DataModelNode extends SemanticNode {
for (Dimension d : datasource.getDimensions()) { for (Dimension d : datasource.getDimensions()) {
List<SqlNode> identifiers = List<SqlNode> identifiers =
expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope); expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> dimensions.add(i.toString())); identifiers.forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName()); dimensions.add(d.getName());
} }
for (Identify i : datasource.getIdentifiers()) { for (Identify i : datasource.getIdentifiers()) {
@@ -73,7 +73,7 @@ public class DataModelNode extends SemanticNode {
for (Measure m : datasource.getMeasures()) { for (Measure m : datasource.getMeasures()) {
List<SqlNode> identifiers = List<SqlNode> identifiers =
expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope); expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> { identifiers.forEach(i -> {
if (!dimensions.contains(i.toString())) { if (!dimensions.contains(i.toString())) {
metrics.add(i.toString()); metrics.add(i.toString());
} }
@@ -127,7 +127,7 @@ public class DataModelNode extends SemanticNode {
} }
public static String getNames(List<DataModel> dataModelList) { public static String getNames(List<DataModel> dataModelList) {
return dataModelList.stream().map(d -> d.getName()).collect(Collectors.joining("_")); return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_"));
} }
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam, public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam,
@@ -138,12 +138,12 @@ public class DataModelNode extends SemanticNode {
: d) : d)
.collect(Collectors.toSet())); .collect(Collectors.toSet()));
Set<String> schemaMetricName = Set<String> schemaMetricName =
ontology.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet()); ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName())) ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName()))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream() .forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName()))); .forEach(mm -> queryMeasures.add(mm.getName())));
queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)) queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
.forEach(m -> queryMeasures.add(m)); .forEach(queryMeasures::add);
} }
public static void mergeQueryFilterDimensionMeasure(Ontology ontology, public static void mergeQueryFilterDimensionMeasure(Ontology ontology,
@@ -155,13 +155,13 @@ public class DataModelNode extends SemanticNode {
FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType), FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType),
filterConditions); filterConditions);
Set<String> queryMeasures = new HashSet<>(measures); Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName = ontology.getMetrics().stream().map(m -> m.getName()) Set<String> schemaMetricName =
.collect(Collectors.toSet()); ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
for (String filterCondition : filterConditions) { for (String filterCondition : filterConditions) {
if (schemaMetricName.contains(filterCondition)) { if (schemaMetricName.contains(filterCondition)) {
ontology.getMetrics().stream() ontology.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(filterCondition)) .filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream() .forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName()))); .forEach(mm -> queryMeasures.add(mm.getName())));
continue; continue;
} }
@@ -196,8 +196,8 @@ public class DataModelNode extends SemanticNode {
} }
// second, traverse the ontology to find other related dataModels // second, traverse the ontology to find other related dataModels
List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, baseDataModel, List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, queryParam,
queryDimensions, queryMeasures); baseDataModel, queryDimensions, queryMeasures);
if (CollectionUtils.isEmpty(relatedDataModels)) { if (CollectionUtils.isEmpty(relatedDataModels)) {
relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel, relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel,
queryDimensions, queryMeasures); queryDimensions, queryMeasures);
@@ -255,7 +255,7 @@ public class DataModelNode extends SemanticNode {
.collect(Collectors.toSet()); .collect(Collectors.toSet());
Set<String> baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName) Set<String> baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
baseDataModel.getIdentifiers().stream().forEach(i -> baseDimensions.add(i.getName())); baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName()));
baseMeasures.retainAll(queryMeasures); baseMeasures.retainAll(queryMeasures);
if (baseMeasures.size() < queryMeasures.size()) { if (baseMeasures.size() < queryMeasures.size()) {
@@ -282,7 +282,8 @@ public class DataModelNode extends SemanticNode {
} }
private static List<DataModel> findRelatedModelsByRelation(Ontology ontology, private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
DataModel baseDataModel, Set<String> queryDimensions, Set<String> queryMeasures) { OntologyQueryParam queryParam, DataModel baseDataModel, Set<String> queryDimensions,
Set<String> queryMeasures) {
Set<String> joinDataModelNames = new HashSet<>(); Set<String> joinDataModelNames = new HashSet<>();
List<DataModel> joinDataModels = new ArrayList<>(); List<DataModel> joinDataModels = new ArrayList<>();
Set<String> before = new HashSet<>(); Set<String> before = new HashSet<>();
@@ -295,7 +296,7 @@ public class DataModelNode extends SemanticNode {
visitJoinRelations, sortedJoinRelation); visitJoinRelations, sortedJoinRelation);
ontology.getJoinRelations().stream() ontology.getJoinRelations().stream()
.filter(j -> !visitJoinRelations.contains(j.getId())) .filter(j -> !visitJoinRelations.contains(j.getId()))
.forEach(j -> sortedJoinRelation.add(j)); .forEach(sortedJoinRelation::add);
for (JoinRelation joinRelation : sortedJoinRelation) { for (JoinRelation joinRelation : sortedJoinRelation) {
if (!before.contains(joinRelation.getLeft()) if (!before.contains(joinRelation.getLeft())
&& !before.contains(joinRelation.getRight())) { && !before.contains(joinRelation.getRight())) {
@@ -305,13 +306,17 @@ public class DataModelNode extends SemanticNode {
boolean isRight = before.contains(joinRelation.getLeft()); boolean isRight = before.contains(joinRelation.getLeft());
DataModel other = isRight ? ontology.getDataModelMap().get(joinRelation.getRight()) DataModel other = isRight ? ontology.getDataModelMap().get(joinRelation.getRight())
: ontology.getDataModelMap().get(joinRelation.getLeft()); : ontology.getDataModelMap().get(joinRelation.getLeft());
String joinDimName = isRight ? joinRelation.getJoinCondition().get(0).getRight()
: joinRelation.getJoinCondition().get(0).getLeft();
if (!queryDimensions.isEmpty()) { if (!queryDimensions.isEmpty()) {
Set<String> linkDimension = other.getDimensions().stream() Set<String> linkDimension = other.getDimensions().stream()
.map(dd -> dd.getName()).collect(Collectors.toSet()); .map(Dimension::getName).collect(Collectors.toSet());
other.getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName())); other.getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimensions); linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
// joinDim should be added to the query dimension
queryParam.getDimensions().add(joinDimName);
} }
} }
Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName) Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName)
@@ -322,7 +327,7 @@ public class DataModelNode extends SemanticNode {
} }
if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) { if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) {
Set<String> linkDimension = ontology.getDimensionMap().get(other.getName()) Set<String> linkDimension = ontology.getDimensionMap().get(other.getName())
.stream().map(dd -> dd.getName()).collect(Collectors.toSet()); .stream().map(Dimension::getName).collect(Collectors.toSet());
linkDimension.retainAll(queryDimensions); linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
@@ -382,15 +387,14 @@ public class DataModelNode extends SemanticNode {
if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) { if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) {
continue; continue;
} }
Long identifierNum = entry.getValue().getIdentifiers().stream().map(i -> i.getName()) long identifierNum = entry.getValue().getIdentifiers().stream().map(Identify::getName)
.filter(i -> baseIdentifiers.contains(i)).count(); .filter(baseIdentifiers::contains).count();
if (identifierNum > 0) { if (identifierNum > 0) {
boolean isMatch = false; boolean isMatch = false;
if (!queryDimension.isEmpty()) { if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().getDimensions().stream() Set<String> linkDimension = entry.getValue().getDimensions().stream()
.map(dd -> dd.getName()).collect(Collectors.toSet()); .map(Dimension::getName).collect(Collectors.toSet());
entry.getValue().getIdentifiers().stream() entry.getValue().getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
.forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension); linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
@@ -398,7 +402,7 @@ public class DataModelNode extends SemanticNode {
} }
if (!measures.isEmpty()) { if (!measures.isEmpty()) {
Set<String> linkMeasure = entry.getValue().getMeasures().stream() Set<String> linkMeasure = entry.getValue().getMeasures().stream()
.map(mm -> mm.getName()).collect(Collectors.toSet()); .map(Measure::getName).collect(Collectors.toSet());
linkMeasure.retainAll(measures); linkMeasure.retainAll(measures);
if (!linkMeasure.isEmpty()) { if (!linkMeasure.isEmpty()) {
isMatch = true; isMatch = true;

View File

@@ -15,6 +15,6 @@ public class OntologyQueryParam {
private String where; private String where;
private Long limit; private Long limit;
private List<ColumnOrder> order; private List<ColumnOrder> order;
private boolean nativeQuery = false; private boolean nativeQuery = true;
private AggOption aggOption = AggOption.DEFAULT; private AggOption aggOption = AggOption.NATIVE;
} }

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.headless.core.utils; package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams; import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType; import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor; import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
@@ -20,11 +20,11 @@ public class SysTimeDimensionBuilder {
Pattern.compile("\\b(DATE|TIME|TIMESTAMP|YEAR|MONTH|DAY|HOUR|MINUTE|SECOND)\\b", Pattern.compile("\\b(DATE|TIME|TIMESTAMP|YEAR|MONTH|DAY|HOUR|MINUTE|SECOND)\\b",
Pattern.CASE_INSENSITIVE); Pattern.CASE_INSENSITIVE);
public static void addSysTimeDimension(List<Dim> dims, DbAdaptor engineAdaptor) { public static void addSysTimeDimension(List<Dimension> dims, DbAdaptor engineAdaptor) {
log.debug("addSysTimeDimension before:{}, engineAdaptor:{}", dims, engineAdaptor); log.debug("addSysTimeDimension before:{}, engineAdaptor:{}", dims, engineAdaptor);
Dim timeDim = getTimeDim(dims); Dimension timeDim = getTimeDim(dims);
if (timeDim == null) { if (timeDim == null) {
timeDim = Dim.getDefault(); timeDim = Dimension.getDefault();
// TODO: time dimension not found // TODO: time dimension not found
return; return;
} }
@@ -34,8 +34,8 @@ public class SysTimeDimensionBuilder {
log.debug("addSysTimeDimension after:{}, engineAdaptor:{}", dims, engineAdaptor); log.debug("addSysTimeDimension after:{}, engineAdaptor:{}", dims, engineAdaptor);
} }
private static Dim generateSysDayDimension(Dim timeDim, DbAdaptor engineAdaptor) { private static Dimension generateSysDayDimension(Dimension timeDim, DbAdaptor engineAdaptor) {
Dim dim = new Dim(); Dimension dim = new Dimension();
dim.setBizName(TimeDimensionEnum.DAY.getName()); dim.setBizName(TimeDimensionEnum.DAY.getName());
dim.setType(DimensionType.partition_time); dim.setType(DimensionType.partition_time);
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.DAY.name().toLowerCase(), dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.DAY.name().toLowerCase(),
@@ -47,8 +47,8 @@ public class SysTimeDimensionBuilder {
return dim; return dim;
} }
private static Dim generateSysWeekDimension(Dim timeDim, DbAdaptor engineAdaptor) { private static Dimension generateSysWeekDimension(Dimension timeDim, DbAdaptor engineAdaptor) {
Dim dim = new Dim(); Dimension dim = new Dimension();
dim.setBizName(TimeDimensionEnum.WEEK.getName()); dim.setBizName(TimeDimensionEnum.WEEK.getName());
dim.setType(DimensionType.partition_time); dim.setType(DimensionType.partition_time);
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.WEEK.name().toLowerCase(), dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.WEEK.name().toLowerCase(),
@@ -60,8 +60,8 @@ public class SysTimeDimensionBuilder {
return dim; return dim;
} }
private static Dim generateSysMonthDimension(Dim timeDim, DbAdaptor engineAdaptor) { private static Dimension generateSysMonthDimension(Dimension timeDim, DbAdaptor engineAdaptor) {
Dim dim = new Dim(); Dimension dim = new Dimension();
dim.setBizName(TimeDimensionEnum.MONTH.getName()); dim.setBizName(TimeDimensionEnum.MONTH.getName());
dim.setType(DimensionType.partition_time); dim.setType(DimensionType.partition_time);
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.MONTH.name().toLowerCase(), dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.MONTH.name().toLowerCase(),
@@ -79,7 +79,8 @@ public class SysTimeDimensionBuilder {
} }
// Check whether the time field contains keywords; generate the time expression // Check whether the time field contains keywords; generate the time expression
private static String generateTimeExpr(Dim timeDim, String dateType, DbAdaptor engineAdaptor) { private static String generateTimeExpr(Dimension timeDim, String dateType,
DbAdaptor engineAdaptor) {
String bizName = timeDim.getBizName(); String bizName = timeDim.getBizName();
String dateFormat = timeDim.getDateFormat(); String dateFormat = timeDim.getDateFormat();
if (containsTimeKeyword(bizName)) { if (containsTimeKeyword(bizName)) {
@@ -90,8 +91,8 @@ public class SysTimeDimensionBuilder {
} }
} }
private static Dim getTimeDim(List<Dim> timeDims) { private static Dimension getTimeDim(List<Dimension> timeDims) {
for (Dim dim : timeDims) { for (Dimension dim : timeDims) {
if (dim.getType().equals(DimensionType.partition_time)) { if (dim.getType().equals(DimensionType.partition_time)) {
return dim; return dim;
} }

View File

@@ -207,7 +207,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
ModelResp modelResp = modelResps.get(0); ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(), String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(),
modelResp.getName()); modelResp.getName());
List<Dim> timeDims = modelResp.getTimeDimension(); List<Dimension> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) { if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql, sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql,
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(), TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(),

View File

@@ -27,7 +27,7 @@ public class SchemaDictUpdateListener implements ApplicationListener<DataEvent>
dictWord.setWord(dataItem.getName()); dictWord.setWord(dataItem.getName());
String sign = DictWordType.NATURE_SPILT; String sign = DictWordType.NATURE_SPILT;
String suffixNature = DictWordType.getSuffixNature(dataItem.getType()); String suffixNature = DictWordType.getSuffixNature(dataItem.getType());
String nature = sign + dataItem.getModelId() + dataItem.getId() + suffixNature; String nature = sign + dataItem.getModelId() + sign + dataItem.getId() + suffixNature;
String natureWithFrequency = nature + " " + Constants.DEFAULT_FREQUENCY; String natureWithFrequency = nature + " " + Constants.DEFAULT_FREQUENCY;
dictWord.setNature(nature); dictWord.setNature(nature);
dictWord.setNatureWithFrequency(natureWithFrequency); dictWord.setNatureWithFrequency(natureWithFrequency);
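The extra `sign` above matters because downstream code (e.g. the alias handling in MultiCustomDictionary earlier in this diff) splits the nature string on that separator to recover the element id. A hedged before/after illustration with made-up ids (the suffix value is illustrative):

String sign = "_";                         // DictWordType.NATURE_SPILT, assumed to be "_"
long modelId = 3L;
long id = 15L;
String suffixNature = "_dimension";        // illustrative suffix
String before = sign + modelId + id + suffixNature;          // "_315_dimension": ids run together
String after = sign + modelId + sign + id + suffixNature;    // "_3_15_dimension": ids stay separable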

View File

@@ -1,6 +1,6 @@
package com.tencent.supersonic.headless.server.manager; package com.tencent.supersonic.headless.server.manager;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure; import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.ModelDetail; import com.tencent.supersonic.headless.api.pojo.ModelDetail;
@@ -18,7 +18,6 @@ import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@@ -54,7 +53,7 @@ public class ModelYamlManager {
return dataModelYamlTpl; return dataModelYamlTpl;
} }
public static DimensionYamlTpl convert(Dim dim) { public static DimensionYamlTpl convert(Dimension dim) {
DimensionYamlTpl dimensionYamlTpl = new DimensionYamlTpl(); DimensionYamlTpl dimensionYamlTpl = new DimensionYamlTpl();
BeanUtils.copyProperties(dim, dimensionYamlTpl); BeanUtils.copyProperties(dim, dimensionYamlTpl);
dimensionYamlTpl.setName(dim.getBizName()); dimensionYamlTpl.setName(dim.getBizName());
@@ -85,15 +84,4 @@ public class ModelYamlManager {
return identifyYamlTpl; return identifyYamlTpl;
} }
private static void addInterCntMetric(String datasourceEnName, ModelDetail datasourceDetail) {
Measure measure = new Measure();
measure.setExpr("1");
if (!CollectionUtils.isEmpty(datasourceDetail.getIdentifiers())) {
measure.setExpr(datasourceDetail.getIdentifiers().get(0).getBizName());
}
measure.setAgg("count");
measure.setBizName(String.format("%s_%s", datasourceEnName, "internal_cnt"));
measure.setIsCreateMetric(1);
datasourceDetail.getMeasures().add(measure);
}
} }

View File

@@ -23,7 +23,7 @@ public class RuleSemanticModeller implements SemanticModeller {
private ColumnSchema convert(DBColumn dbColumn) { private ColumnSchema convert(DBColumn dbColumn) {
ColumnSchema columnSchema = new ColumnSchema(); ColumnSchema columnSchema = new ColumnSchema();
columnSchema.setName(dbColumn.getComment()); columnSchema.setName(dbColumn.getColumnName());
columnSchema.setColumnName(dbColumn.getColumnName()); columnSchema.setColumnName(dbColumn.getColumnName());
columnSchema.setComment(dbColumn.getComment()); columnSchema.setComment(dbColumn.getComment());
columnSchema.setDataType(dbColumn.getDataType()); columnSchema.setDataType(dbColumn.getDataType());

View File

@@ -16,6 +16,7 @@ public class ModelDO {
private Long domainId; private Long domainId;
@Deprecated
private Long tagObjectId; private Long tagObjectId;
private String name; private String name;

View File

@@ -28,6 +28,7 @@ public class QueryStatDO {
private String queryStructCmd; private String queryStructCmd;
@TableField("struct_cmd_md5") @TableField("struct_cmd_md5")
private String queryStructCmdMd5; private String queryStructCmdMd5;
@TableField("\"sql\"")
private String sql; private String sql;
private String sqlMd5; private String sqlMd5;
private String queryEngine; private String queryEngine;

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.server.persistence.mapper; package com.tencent.supersonic.headless.server.persistence.mapper;
import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO; import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter; import com.tencent.supersonic.headless.server.pojo.DimensionsFilter;
import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Mapper;
@@ -12,11 +11,7 @@ public interface DimensionDOCustomMapper {
void batchInsert(List<DimensionDO> dimensionDOS); void batchInsert(List<DimensionDO> dimensionDOS);
void batchUpdate(List<DimensionDO> dimensionDOS);
void batchUpdateStatus(List<DimensionDO> dimensionDOS); void batchUpdateStatus(List<DimensionDO> dimensionDOS);
List<DimensionDO> query(DimensionFilter dimensionFilter);
List<DimensionDO> queryDimensions(DimensionsFilter dimensionsFilter); List<DimensionDO> queryDimensions(DimensionsFilter dimensionsFilter);
} }

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.server.persistence.mapper; package com.tencent.supersonic.headless.server.persistence.mapper;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO; import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO;
import com.tencent.supersonic.headless.server.pojo.MetricFilter;
import com.tencent.supersonic.headless.server.pojo.MetricsFilter; import com.tencent.supersonic.headless.server.pojo.MetricsFilter;
import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Mapper;
@@ -20,7 +19,5 @@ public interface MetricDOCustomMapper {
void updateClassificationsBatch(List<MetricDO> metricDOS); void updateClassificationsBatch(List<MetricDO> metricDOS);
List<MetricDO> query(MetricFilter metricFilter);
List<MetricDO> queryMetrics(MetricsFilter metricsFilter); List<MetricDO> queryMetrics(MetricsFilter metricsFilter);
} }

View File

@@ -1,15 +1,9 @@
package com.tencent.supersonic.headless.server.persistence.mapper; package com.tencent.supersonic.headless.server.persistence.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.tencent.supersonic.headless.api.pojo.QueryStat;
import com.tencent.supersonic.headless.api.pojo.request.ItemUseReq;
import com.tencent.supersonic.headless.server.persistence.dataobject.QueryStatDO; import com.tencent.supersonic.headless.server.persistence.dataobject.QueryStatDO;
import org.apache.ibatis.annotations.Mapper; import org.apache.ibatis.annotations.Mapper;
import java.util.List;
@Mapper @Mapper
public interface StatMapper extends BaseMapper<QueryStatDO> { public interface StatMapper extends BaseMapper<QueryStatDO> {
List<QueryStat> getStatInfo(ItemUseReq itemUseCommend);
} }

View File

@@ -118,6 +118,7 @@ public class DictRepositoryImpl implements DictRepository {
wrapper.lambda().and(qw -> qw.like(DictTaskDO::getName, key).or() wrapper.lambda().and(qw -> qw.like(DictTaskDO::getName, key).or()
.like(DictTaskDO::getDescription, key).or().like(DictTaskDO::getConfig, key)); .like(DictTaskDO::getDescription, key).or().like(DictTaskDO::getConfig, key));
} }
wrapper.lambda().orderByDesc(DictTaskDO::getCreatedAt);
return dictTaskMapper.selectList(wrapper); return dictTaskMapper.selectList(wrapper);
} }

View File

@@ -1,14 +1,17 @@
package com.tencent.supersonic.headless.server.persistence.repository.impl; package com.tencent.supersonic.headless.server.persistence.repository.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO; import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO;
import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOCustomMapper; import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOCustomMapper;
import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOMapper; import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOMapper;
import com.tencent.supersonic.headless.server.persistence.repository.DimensionRepository; import com.tencent.supersonic.headless.server.persistence.repository.DimensionRepository;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter; import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter; import com.tencent.supersonic.headless.server.pojo.DimensionsFilter;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.List; import java.util.List;
import java.util.Objects;
@Service @Service
public class DimensionRepositoryImpl implements DimensionRepository { public class DimensionRepositoryImpl implements DimensionRepository {
@@ -50,7 +53,43 @@ public class DimensionRepositoryImpl implements DimensionRepository {
@Override @Override
public List<DimensionDO> getDimension(DimensionFilter dimensionFilter) { public List<DimensionDO> getDimension(DimensionFilter dimensionFilter) {
return dimensionDOCustomMapper.query(dimensionFilter); QueryWrapper<DimensionDO> queryWrapper = new QueryWrapper<>();
queryWrapper.lambda().ne(DimensionDO::getStatus, 3);
if (Objects.nonNull(dimensionFilter.getIds()) && !dimensionFilter.getIds().isEmpty()) {
queryWrapper.lambda().in(DimensionDO::getId, dimensionFilter.getIds());
}
if (StringUtils.isNotBlank(dimensionFilter.getId())) {
queryWrapper.lambda().eq(DimensionDO::getId, dimensionFilter.getId());
}
if (Objects.nonNull(dimensionFilter.getModelIds())
&& !dimensionFilter.getModelIds().isEmpty()) {
queryWrapper.lambda().in(DimensionDO::getModelId, dimensionFilter.getModelIds());
}
if (StringUtils.isNotBlank(dimensionFilter.getName())) {
queryWrapper.lambda().like(DimensionDO::getName, dimensionFilter.getName());
}
if (StringUtils.isNotBlank(dimensionFilter.getBizName())) {
queryWrapper.lambda().like(DimensionDO::getBizName, dimensionFilter.getBizName());
}
if (Objects.nonNull(dimensionFilter.getStatus())) {
queryWrapper.lambda().eq(DimensionDO::getStatus, dimensionFilter.getStatus());
}
if (Objects.nonNull(dimensionFilter.getSensitiveLevel())) {
queryWrapper.lambda().eq(DimensionDO::getSensitiveLevel,
dimensionFilter.getSensitiveLevel());
}
if (StringUtils.isNotBlank(dimensionFilter.getCreatedBy())) {
queryWrapper.lambda().eq(DimensionDO::getCreatedBy, dimensionFilter.getCreatedBy());
}
if (StringUtils.isNotBlank(dimensionFilter.getKey())) {
String key = dimensionFilter.getKey();
queryWrapper.lambda().like(DimensionDO::getName, key).or()
.like(DimensionDO::getBizName, key).or().like(DimensionDO::getDescription, key)
.or().like(DimensionDO::getAlias, key).or()
.like(DimensionDO::getCreatedBy, key);
}
return dimensionDOMapper.selectList(queryWrapper);
} }
@Override @Override
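The rewritten getDimension above chains one if per filter field. MyBatis-Plus wrappers also offer condition-first overloads (eq(boolean, ...), in(boolean, ...), like(boolean, ...)), so the same pattern can be written more compactly. This abbreviated sketch is not part of the commit; `filter` stands for the DimensionFilter argument, and LambdaQueryWrapper / CollectionUtils come from the MyBatis-Plus core packages already imported elsewhere in this diff:

LambdaQueryWrapper<DimensionDO> wrapper = new QueryWrapper<DimensionDO>().lambda()
        .ne(DimensionDO::getStatus, 3)
        .in(CollectionUtils.isNotEmpty(filter.getIds()), DimensionDO::getId, filter.getIds())
        .in(CollectionUtils.isNotEmpty(filter.getModelIds()), DimensionDO::getModelId, filter.getModelIds())
        .like(StringUtils.isNotBlank(filter.getName()), DimensionDO::getName, filter.getName())
        .like(StringUtils.isNotBlank(filter.getBizName()), DimensionDO::getBizName, filter.getBizName())
        .eq(Objects.nonNull(filter.getStatus()), DimensionDO::getStatus, filter.getStatus())
        .eq(Objects.nonNull(filter.getSensitiveLevel()), DimensionDO::getSensitiveLevel, filter.getSensitiveLevel())
        .eq(StringUtils.isNotBlank(filter.getCreatedBy()), DimensionDO::getCreatedBy, filter.getCreatedBy());
return dimensionDOMapper.selectList(wrapper);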

View File

@@ -9,9 +9,11 @@ import com.tencent.supersonic.headless.server.persistence.mapper.MetricQueryDefa
import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository; import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository;
import com.tencent.supersonic.headless.server.pojo.MetricFilter; import com.tencent.supersonic.headless.server.pojo.MetricFilter;
import com.tencent.supersonic.headless.server.pojo.MetricsFilter; import com.tencent.supersonic.headless.server.pojo.MetricsFilter;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.List; import java.util.List;
import java.util.Objects;
@Component @Component
public class MetricRepositoryImpl implements MetricRepository { public class MetricRepositoryImpl implements MetricRepository {
@@ -73,7 +75,46 @@ public class MetricRepositoryImpl implements MetricRepository {
@Override @Override
public List<MetricDO> getMetric(MetricFilter metricFilter) { public List<MetricDO> getMetric(MetricFilter metricFilter) {
return metricDOCustomMapper.query(metricFilter); QueryWrapper<MetricDO> queryWrapper = new QueryWrapper<>();
queryWrapper.lambda().ne(MetricDO::getStatus, 3);
if (Objects.nonNull(metricFilter.getIds()) && !metricFilter.getIds().isEmpty()) {
queryWrapper.lambda().in(MetricDO::getId, metricFilter.getIds());
}
if (StringUtils.isNotBlank(metricFilter.getId())) {
queryWrapper.lambda().eq(MetricDO::getId, metricFilter.getId());
}
if (Objects.nonNull(metricFilter.getModelIds()) && !metricFilter.getModelIds().isEmpty()) {
queryWrapper.lambda().in(MetricDO::getModelId, metricFilter.getModelIds());
}
if (StringUtils.isNotBlank(metricFilter.getType())) {
queryWrapper.lambda().eq(MetricDO::getType, metricFilter.getType());
}
if (StringUtils.isNotBlank(metricFilter.getName())) {
queryWrapper.lambda().like(MetricDO::getName, metricFilter.getName());
}
if (StringUtils.isNotBlank(metricFilter.getBizName())) {
queryWrapper.lambda().like(MetricDO::getBizName, metricFilter.getBizName());
}
if (Objects.nonNull(metricFilter.getStatus())) {
queryWrapper.lambda().eq(MetricDO::getStatus, metricFilter.getStatus());
}
if (Objects.nonNull(metricFilter.getSensitiveLevel())) {
queryWrapper.lambda().eq(MetricDO::getSensitiveLevel, metricFilter.getSensitiveLevel());
}
if (StringUtils.isNotBlank(metricFilter.getCreatedBy())) {
queryWrapper.lambda().eq(MetricDO::getCreatedBy, metricFilter.getCreatedBy());
}
if (Objects.nonNull(metricFilter.getIsPublish()) && metricFilter.getIsPublish() == 1) {
queryWrapper.lambda().eq(MetricDO::getIsPublish, metricFilter.getIsPublish());
}
if (StringUtils.isNotBlank(metricFilter.getKey())) {
String key = metricFilter.getKey();
queryWrapper.lambda().like(MetricDO::getName, key).or().like(MetricDO::getBizName, key)
.or().like(MetricDO::getDescription, key).or().like(MetricDO::getAlias, key)
.or().like(MetricDO::getCreatedBy, key);
}
return metricDOMapper.selectList(queryWrapper);
} }
@Override @Override

View File

@@ -51,7 +51,7 @@ public class QueryRuleRepositoryImpl implements QueryRuleRepository {
QueryWrapper<QueryRuleDO> wrapperSys = new QueryWrapper<>(); QueryWrapper<QueryRuleDO> wrapperSys = new QueryWrapper<>();
// return the rules configured by the system // return the rules configured by the system
wrapperSys.or().eq("priority", 0L); wrapperSys.lambda().or().eq(QueryRuleDO::getPriority, 0L);
List<QueryRuleDO> queryRuleDOListSys = mapper.selectList(wrapperSys); List<QueryRuleDO> queryRuleDOListSys = mapper.selectList(wrapperSys);
queryRuleDOList.addAll(queryRuleDOListSys); queryRuleDOList.addAll(queryRuleDOListSys);

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.headless.server.persistence.repository.impl; package com.tencent.supersonic.headless.server.persistence.repository.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.tencent.supersonic.common.pojo.enums.TypeEnums; import com.tencent.supersonic.common.pojo.enums.TypeEnums;
@@ -47,7 +48,7 @@ public class StatRepositoryImpl implements StatRepository {
@SneakyThrows @SneakyThrows
public List<ItemUseResp> getStatInfo(ItemUseReq itemUseReq) { public List<ItemUseResp> getStatInfo(ItemUseReq itemUseReq) {
List<ItemUseResp> result = new ArrayList<>(); List<ItemUseResp> result = new ArrayList<>();
List<QueryStat> statInfos = statMapper.getStatInfo(itemUseReq); List<QueryStatDO> statInfos = getQueryStats(itemUseReq);
Map<String, Long> map = new ConcurrentHashMap<>(); Map<String, Long> map = new ConcurrentHashMap<>();
statInfos.stream().forEach(stat -> { statInfos.stream().forEach(stat -> {
String dimensions = stat.getDimensions(); String dimensions = stat.getDimensions();
@@ -70,6 +71,21 @@ public class StatRepositoryImpl implements StatRepository {
.collect(Collectors.toList()); .collect(Collectors.toList());
} }
private List<QueryStatDO> getQueryStats(ItemUseReq itemUseReq) {
QueryWrapper<QueryStatDO> queryWrapper = new QueryWrapper<>();
if (Objects.nonNull(itemUseReq.getModelId())) {
queryWrapper.lambda().eq(QueryStatDO::getModelId, itemUseReq.getModelId());
}
if (Objects.nonNull(itemUseReq.getModelIds()) && !itemUseReq.getModelIds().isEmpty()) {
queryWrapper.lambda().in(QueryStatDO::getModelId, itemUseReq.getModelIds());
}
if (Objects.nonNull(itemUseReq.getMetric())) {
queryWrapper.lambda().like(QueryStatDO::getMetrics, itemUseReq.getMetric());
}
return statMapper.selectList(queryWrapper);
}
private void updateStatMapInfo(Map<String, Long> map, String dimensions, String type, private void updateStatMapInfo(Map<String, Long> map, String dimensions, String type,
Long dataSetId) { Long dataSetId) {
if (StringUtils.isNotEmpty(dimensions)) { if (StringUtils.isNotEmpty(dimensions)) {

View File

@@ -47,7 +47,7 @@ public interface DimensionService {
void sendDimensionEventBatch(List<Long> modelIds, EventType eventType); void sendDimensionEventBatch(List<Long> modelIds, EventType eventType);
DataEvent getDataEvent(); DataEvent getAllDataEvents();
Boolean updateDimValueAlias(DimValueAliasReq req, User user); Boolean updateDimValueAlias(DimValueAliasReq req, User user);
} }

View File

@@ -79,7 +79,7 @@ public class DataSetServiceImpl extends ServiceImpl<DataSetDOMapper, DataSetDO>
DataSetDO dataSetDO = convert(dataSetReq); DataSetDO dataSetDO = convert(dataSetReq);
dataSetDO.setStatus(StatusEnum.ONLINE.getCode()); dataSetDO.setStatus(StatusEnum.ONLINE.getCode());
DataSetResp dataSetResp = convert(dataSetDO); DataSetResp dataSetResp = convert(dataSetDO);
conflictCheck(dataSetResp); // conflictCheck(dataSetResp);
save(dataSetDO); save(dataSetDO);
dataSetResp.setId(dataSetDO.getId()); dataSetResp.setId(dataSetDO.getId());
return dataSetResp; return dataSetResp;
@@ -90,7 +90,7 @@ public class DataSetServiceImpl extends ServiceImpl<DataSetDOMapper, DataSetDO>
dataSetReq.updatedBy(user.getName()); dataSetReq.updatedBy(user.getName());
DataSetDO dataSetDO = convert(dataSetReq); DataSetDO dataSetDO = convert(dataSetReq);
DataSetResp dataSetResp = convert(dataSetDO); DataSetResp dataSetResp = convert(dataSetDO);
conflictCheck(dataSetResp); // conflictCheck(dataSetResp);
updateById(dataSetDO); updateById(dataSetDO);
return dataSetResp; return dataSetResp;
} }

View File

@@ -225,6 +225,9 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
if (StringUtils.isNotBlank(modelBuildReq.getSql())) { if (StringUtils.isNotBlank(modelBuildReq.getSql())) {
List<DBColumn> columns = List<DBColumn> columns =
getColumns(modelBuildReq.getDatabaseId(), modelBuildReq.getSql()); getColumns(modelBuildReq.getDatabaseId(), modelBuildReq.getSql());
DatabaseResp databaseResp = getDatabase(modelBuildReq.getDatabaseId());
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType());
columns.forEach(c -> c.setFieldType(engineAdaptor.classifyColumnType(c.getDataType())));
dbColumnMap.put(modelBuildReq.getSql(), columns); dbColumnMap.put(modelBuildReq.getSql(), columns);
} else { } else {
for (String table : modelBuildReq.getTables()) { for (String table : modelBuildReq.getTables()) {
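The three lines added above route column classification through a per-engine DbAdaptor instead of leaving raw database type strings on each DBColumn. Purely as an illustration of what such a classifier tends to look like (the enum, type lists, and normalization below are assumptions for the sketch, not supersonic's actual FieldType or DbAdaptor contract):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

public class ColumnTypeSketch {

    // Illustrative buckets only; the committed code classifies into the project's own field types.
    public enum ColumnClass { TIME, NUMERIC, CATEGORICAL }

    private static final Set<String> TIME_TYPES =
            new HashSet<>(Arrays.asList("DATE", "DATETIME", "TIMESTAMP"));
    private static final Set<String> NUMERIC_TYPES = new HashSet<>(Arrays.asList(
            "TINYINT", "SMALLINT", "INT", "INTEGER", "BIGINT", "FLOAT", "DOUBLE", "DECIMAL", "NUMERIC"));

    // Strips length/precision suffixes such as "bigint(20)" before the lookup,
    // then falls back to CATEGORICAL for anything unrecognised.
    public static ColumnClass classify(String dataType) {
        String normalized = dataType == null ? ""
                : dataType.toUpperCase(Locale.ROOT).replaceAll("\\(.*\\)", "").trim();
        if (TIME_TYPES.contains(normalized)) {
            return ColumnClass.TIME;
        }
        if (NUMERIC_TYPES.contains(normalized)) {
            return ColumnClass.NUMERIC;
        }
        return ColumnClass.CATEGORICAL;
    }
}

Keeping the mapping behind DbAdaptorFactory lets each supported engine report its own type names while the model-building code only sees the resulting classification.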
View File
@@ -6,12 +6,9 @@ import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.pagehelper.PageHelper; import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo; import com.github.pagehelper.PageInfo;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Constants; import com.tencent.supersonic.common.pojo.*;
import com.tencent.supersonic.common.pojo.DataEvent;
import com.tencent.supersonic.common.pojo.DataItem;
import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.EventType; import com.tencent.supersonic.common.pojo.enums.EventType;
import com.tencent.supersonic.common.pojo.enums.StatusEnum; import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums; import com.tencent.supersonic.common.pojo.enums.TypeEnums;
@@ -25,23 +22,14 @@ import com.tencent.supersonic.headless.api.pojo.request.DimValueAliasReq;
import com.tencent.supersonic.headless.api.pojo.request.DimensionReq; import com.tencent.supersonic.headless.api.pojo.request.DimensionReq;
import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq; import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq;
import com.tencent.supersonic.headless.api.pojo.request.PageDimensionReq; import com.tencent.supersonic.headless.api.pojo.request.PageDimensionReq;
import com.tencent.supersonic.headless.api.pojo.response.DataSetResp; import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO; import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO;
import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOMapper; import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOMapper;
import com.tencent.supersonic.headless.server.persistence.repository.DimensionRepository; import com.tencent.supersonic.headless.server.persistence.repository.DimensionRepository;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter; import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter; import com.tencent.supersonic.headless.server.pojo.DimensionsFilter;
import com.tencent.supersonic.headless.server.pojo.ModelFilter; import com.tencent.supersonic.headless.server.pojo.ModelFilter;
import com.tencent.supersonic.headless.server.service.DataSetService; import com.tencent.supersonic.headless.server.service.*;
import com.tencent.supersonic.headless.server.service.DatabaseService;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.ModelRelaService;
import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.service.TagMetaService;
import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper; import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper;
import com.tencent.supersonic.headless.server.utils.DimensionConverter; import com.tencent.supersonic.headless.server.utils.DimensionConverter;
import com.tencent.supersonic.headless.server.utils.NameCheckUtils; import com.tencent.supersonic.headless.server.utils.NameCheckUtils;
@@ -152,11 +140,7 @@ public class DimensionServiceImpl extends ServiceImpl<DimensionDOMapper, Dimensi
DimensionConverter.convert(dimensionDO, dimensionReq); DimensionConverter.convert(dimensionDO, dimensionReq);
dimensionRepository.updateDimension(dimensionDO); dimensionRepository.updateDimension(dimensionDO);
if (!oldName.equals(dimensionDO.getName())) { if (!oldName.equals(dimensionDO.getName())) {
sendEvent( sendEvent(getDataItem(dimensionDO), EventType.UPDATE);
DataItem.builder().modelId(dimensionDO.getModelId() + Constants.UNDERLINE)
.newName(dimensionReq.getName()).name(oldName).type(TypeEnums.DIMENSION)
.id(dimensionDO.getId() + Constants.UNDERLINE).build(),
EventType.UPDATE);
} }
} }
@@ -424,7 +408,7 @@ public class DimensionServiceImpl extends ServiceImpl<DimensionDOMapper, Dimensi
eventPublisher.publishEvent(dataEvent); eventPublisher.publishEvent(dataEvent);
} }
public DataEvent getDataEvent() { public DataEvent getAllDataEvents() {
DimensionFilter dimensionFilter = new DimensionFilter(); DimensionFilter dimensionFilter = new DimensionFilter();
List<DimensionDO> dimensionDOS = queryDimension(dimensionFilter); List<DimensionDO> dimensionDOS = queryDimension(dimensionFilter);
return getDataEvent(dimensionDOS, EventType.ADD); return getDataEvent(dimensionDOS, EventType.ADD);
@@ -464,13 +448,18 @@ public class DimensionServiceImpl extends ServiceImpl<DimensionDOMapper, Dimensi
return true; return true;
} }
private DataItem getDataItem(DimensionDO dimensionDO) {
ModelResp modelResp = modelService.getModel(dimensionDO.getModelId());
DimensionResp dimensionResp = DimensionConverter.convert2DimensionResp(dimensionDO,
ImmutableMap.of(modelResp.getId(), modelResp));
return DataItem.builder().id(dimensionResp.getId().toString()).name(dimensionResp.getName())
.bizName(dimensionResp.getBizName()).modelId(dimensionResp.getModelId().toString())
.domainId(dimensionResp.getDomainId().toString()).type(TypeEnums.DIMENSION).build();
}
private DataEvent getDataEvent(List<DimensionDO> dimensionDOS, EventType eventType) { private DataEvent getDataEvent(List<DimensionDO> dimensionDOS, EventType eventType) {
List<DataItem> dataItems = dimensionDOS.stream() List<DataItem> dataItems =
.map(dimensionDO -> DataItem.builder().id(dimensionDO.getId() + Constants.UNDERLINE) dimensionDOS.stream().map(this::getDataItem).collect(Collectors.toList());
.name(dimensionDO.getName())
.modelId(dimensionDO.getModelId() + Constants.UNDERLINE)
.type(TypeEnums.DIMENSION).build())
.collect(Collectors.toList());
return new DataEvent(this, dataItems, eventType); return new DataEvent(this, dataItems, eventType);
} }
View File
@@ -5,59 +5,26 @@ import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.pagehelper.PageHelper; import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo; import com.github.pagehelper.PageInfo;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.pojo.Aggregator; import com.tencent.supersonic.common.pojo.*;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DataEvent;
import com.tencent.supersonic.common.pojo.DataItem;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.*; import com.tencent.supersonic.common.pojo.enums.*;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MeasureParam;
import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.MetricParam;
import com.tencent.supersonic.headless.api.pojo.MetricQueryDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.MapModeEnum; import com.tencent.supersonic.headless.api.pojo.enums.MapModeEnum;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType; import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq; import com.tencent.supersonic.headless.api.pojo.request.*;
import com.tencent.supersonic.headless.api.pojo.request.MetricBaseReq; import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.api.pojo.request.MetricReq;
import com.tencent.supersonic.headless.api.pojo.request.PageMetricReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryMapReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryMetricReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DataSetMapInfo;
import com.tencent.supersonic.headless.api.pojo.response.DataSetResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.MapInfoResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.server.facade.service.ChatLayerService; import com.tencent.supersonic.headless.server.facade.service.ChatLayerService;
import com.tencent.supersonic.headless.server.persistence.dataobject.CollectDO; import com.tencent.supersonic.headless.server.persistence.dataobject.CollectDO;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO; import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricQueryDefaultConfigDO; import com.tencent.supersonic.headless.server.persistence.dataobject.MetricQueryDefaultConfigDO;
import com.tencent.supersonic.headless.server.persistence.mapper.MetricDOMapper; import com.tencent.supersonic.headless.server.persistence.mapper.MetricDOMapper;
import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository; import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter; import com.tencent.supersonic.headless.server.pojo.*;
import com.tencent.supersonic.headless.server.pojo.MetricFilter; import com.tencent.supersonic.headless.server.service.*;
import com.tencent.supersonic.headless.server.pojo.MetricsFilter;
import com.tencent.supersonic.headless.server.pojo.ModelCluster;
import com.tencent.supersonic.headless.server.pojo.ModelFilter;
import com.tencent.supersonic.headless.server.service.CollectService;
import com.tencent.supersonic.headless.server.service.DataSetService;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.service.TagMetaService;
import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper; import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper;
import com.tencent.supersonic.headless.server.utils.MetricCheckUtils; import com.tencent.supersonic.headless.server.utils.MetricCheckUtils;
import com.tencent.supersonic.headless.server.utils.MetricConverter; import com.tencent.supersonic.headless.server.utils.MetricConverter;
@@ -70,18 +37,7 @@ import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Service @Service
@@ -667,12 +623,13 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
} }
private DataItem getDataItem(MetricDO metricDO) { private DataItem getDataItem(MetricDO metricDO) {
MetricResp metricResp = ModelResp modelResp = modelService.getModel(metricDO.getModelId());
MetricConverter.convert2MetricResp(metricDO, new HashMap<>(), Lists.newArrayList()); MetricResp metricResp = MetricConverter.convert2MetricResp(metricDO,
ImmutableMap.of(modelResp.getId(), modelResp), Lists.newArrayList());
fillDefaultAgg(metricResp); fillDefaultAgg(metricResp);
return DataItem.builder().id(metricDO.getId() + Constants.UNDERLINE) return DataItem.builder().id(metricResp.getId().toString()).name(metricResp.getName())
.name(metricDO.getName()).bizName(metricDO.getBizName()) .bizName(metricResp.getBizName()).modelId(metricResp.getModelId().toString())
.modelId(metricDO.getModelId() + Constants.UNDERLINE).type(TypeEnums.METRIC) .domainId(metricResp.getDomainId().toString()).type(TypeEnums.METRIC)
.defaultAgg(metricResp.getDefaultAgg()).build(); .defaultAgg(metricResp.getDefaultAgg()).build();
} }
View File
@@ -12,7 +12,7 @@ import com.tencent.supersonic.common.pojo.exception.InvalidArgumentException;
import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.DBColumn; import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.DbSchema; import com.tencent.supersonic.headless.api.pojo.DbSchema;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.ItemDateFilter; import com.tencent.supersonic.headless.api.pojo.ItemDateFilter;
import com.tencent.supersonic.headless.api.pojo.Measure; import com.tencent.supersonic.headless.api.pojo.Measure;
@@ -78,23 +78,23 @@ import java.util.stream.Collectors;
@Slf4j @Slf4j
public class ModelServiceImpl implements ModelService { public class ModelServiceImpl implements ModelService {
private ModelRepository modelRepository; private final ModelRepository modelRepository;
private DatabaseService databaseService; private final DatabaseService databaseService;
private DimensionService dimensionService; private final DimensionService dimensionService;
private MetricService metricService; private final MetricService metricService;
private DomainService domainService; private final DomainService domainService;
private UserService userService; private final UserService userService;
private DataSetService dataSetService; private final DataSetService dataSetService;
private DateInfoRepository dateInfoRepository; private final DateInfoRepository dateInfoRepository;
private ModelRelaService modelRelaService; private final ModelRelaService modelRelaService;
ExecutorService executor = ExecutorService executor =
new ThreadPoolExecutor(0, 5, 5L, TimeUnit.SECONDS, new LinkedBlockingQueue<>()); new ThreadPoolExecutor(0, 5, 5L, TimeUnit.SECONDS, new LinkedBlockingQueue<>());
@@ -292,12 +292,9 @@ public class ModelServiceImpl implements ModelService {
if (modelReq.getModelDetail() == null) { if (modelReq.getModelDetail() == null) {
return; return;
} }
List<Dim> dims = modelReq.getModelDetail().getDimensions(); List<Dimension> dims = modelReq.getModelDetail().getDimensions();
List<Measure> measures = modelReq.getModelDetail().getMeasures(); List<Measure> measures = modelReq.getModelDetail().getMeasures();
List<Identify> identifies = modelReq.getModelDetail().getIdentifiers(); List<Identify> identifies = modelReq.getModelDetail().getIdentifiers();
if (CollectionUtils.isEmpty(dims)) {
throw new InvalidArgumentException("缺少维度信息");
}
for (Measure measure : measures) { for (Measure measure : measures) {
String measureForbiddenCharacters = String measureForbiddenCharacters =
NameCheckUtils.findForbiddenCharacters(measure.getName()); NameCheckUtils.findForbiddenCharacters(measure.getName());
@@ -308,7 +305,7 @@ public class ModelServiceImpl implements ModelService {
throw new InvalidArgumentException(message); throw new InvalidArgumentException(message);
} }
} }
for (Dim dim : dims) { for (Dimension dim : dims) {
String dimForbiddenCharacters = NameCheckUtils.findForbiddenCharacters(dim.getName()); String dimForbiddenCharacters = NameCheckUtils.findForbiddenCharacters(dim.getName());
if (StringUtils.isNotBlank(dim.getName()) if (StringUtils.isNotBlank(dim.getName())
&& StringUtils.isNotBlank(dimForbiddenCharacters)) { && StringUtils.isNotBlank(dimForbiddenCharacters)) {
@@ -337,12 +334,10 @@ public class ModelServiceImpl implements ModelService {
Set<String> relations = new HashSet<>(); Set<String> relations = new HashSet<>();
for (ModelRela modelRela : modelRelas) { for (ModelRela modelRela : modelRelas) {
if (modelRela.getFromModelId().equals(modelReq.getId())) { if (modelRela.getFromModelId().equals(modelReq.getId())) {
modelRela.getJoinConditions().stream() modelRela.getJoinConditions().forEach(r -> relations.add(r.getLeftField()));
.forEach(r -> relations.add(r.getLeftField()));
} }
if (modelRela.getToModelId().equals(modelReq.getId())) { if (modelRela.getToModelId().equals(modelReq.getId())) {
modelRela.getJoinConditions().stream() modelRela.getJoinConditions().forEach(r -> relations.add(r.getRightField()));
.forEach(r -> relations.add(r.getRightField()));
} }
} }
if (relations.isEmpty()) { if (relations.isEmpty()) {
@@ -351,10 +346,10 @@ public class ModelServiceImpl implements ModelService {
// any identify in model relation should not be deleted // any identify in model relation should not be deleted
if (modelReq.getModelDetail() == null if (modelReq.getModelDetail() == null
|| CollectionUtils.isEmpty(modelReq.getModelDetail().getIdentifiers())) { || CollectionUtils.isEmpty(modelReq.getModelDetail().getIdentifiers())) {
throw new InvalidArgumentException(String.format("模型关联中主键/外键不存在, 请检查")); throw new InvalidArgumentException("模型关联中主键/外键不存在, 请检查");
} }
List<String> modelIdentifiers = modelReq.getModelDetail().getIdentifiers().stream() List<String> modelIdentifiers = modelReq.getModelDetail().getIdentifiers().stream()
.map(i -> i.getBizName()).collect(Collectors.toList()); .map(Identify::getBizName).collect(Collectors.toList());
for (String rela : relations) { for (String rela : relations) {
if (!modelIdentifiers.contains(rela)) { if (!modelIdentifiers.contains(rela)) {
throw new InvalidArgumentException(String.format("模型关联中主键/外键(%s)不存在, 请检查", rela)); throw new InvalidArgumentException(String.format("模型关联中主键/外键(%s)不存在, 请检查", rela));
@@ -459,7 +454,7 @@ public class ModelServiceImpl implements ModelService {
} }
ModelFilter modelFilter = new ModelFilter(); ModelFilter modelFilter = new ModelFilter();
modelFilter.setDomainIds(domainIds); modelFilter.setDomainIds(domainIds);
modelFilter.setIncludesDetail(false); modelFilter.setIncludesDetail(true);
List<ModelResp> modelResps = getModelList(modelFilter); List<ModelResp> modelResps = getModelList(modelFilter);
if (CollectionUtils.isEmpty(modelResps)) { if (CollectionUtils.isEmpty(modelResps)) {
return modelResps; return modelResps;
View File
@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.server.service.impl;
import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType; import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.ValueDistribution; import com.tencent.supersonic.headless.api.pojo.ValueDistribution;
import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType; import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType;
@@ -93,7 +93,7 @@ public class TagQueryServiceImpl implements TagQueryService {
private void correctDateConf(ItemValueReq itemValueReq, TagResp tag, User user) private void correctDateConf(ItemValueReq itemValueReq, TagResp tag, User user)
throws Exception { throws Exception {
ModelResp model = modelService.getModel(tag.getModelId()); ModelResp model = modelService.getModel(tag.getModelId());
List<Dim> timeDimension = model.getTimeDimension(); List<Dimension> timeDimension = model.getTimeDimension();
if (CollectionUtils.isEmpty(timeDimension)) { if (CollectionUtils.isEmpty(timeDimension)) {
itemValueReq.setDateConf(null); itemValueReq.setDateConf(null);
return; return;
@@ -112,12 +112,12 @@ public class TagQueryServiceImpl implements TagQueryService {
itemValueReq.setDateConf(dateConf); itemValueReq.setDateConf(dateConf);
} }
private String queryTagDate(Dim dim) { private String queryTagDate(Dimension dim) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dim.getDateFormat()); DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dim.getDateFormat());
return LocalDate.now().plusDays(-dayBefore).format(formatter); return LocalDate.now().plusDays(-dayBefore).format(formatter);
} }
private String queryTagDateFromDbBySql(Dim dim, TagResp tag, ItemValueReq itemValueReq, private String queryTagDateFromDbBySql(Dimension dim, TagResp tag, ItemValueReq itemValueReq,
User user) { User user) {
String sqlPattern = "select max(%s) as %s from tbl where %s is not null"; String sqlPattern = "select max(%s) as %s from tbl where %s is not null";
@@ -129,7 +129,7 @@ public class TagQueryServiceImpl implements TagQueryService {
if (Objects.nonNull(itemValueReq) && itemValueReq.getDateConf().getUnit() > 1) { if (Objects.nonNull(itemValueReq) && itemValueReq.getDateConf().getUnit() > 1) {
ModelResp model = modelService.getModel(tag.getModelId()); ModelResp model = modelService.getModel(tag.getModelId());
if (Objects.nonNull(model)) { if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension(); List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) { if (!CollectionUtils.isEmpty(timeDims)) {
String dateFormat = timeDims.get(0).getDateFormat(); String dateFormat = timeDims.get(0).getDateFormat();
if (StringUtils.isEmpty(dateFormat)) { if (StringUtils.isEmpty(dateFormat)) {
View File
@@ -69,7 +69,7 @@ public class MetaEmbeddingTask implements CommandLineRunner {
embeddingService.addQuery(embeddingConfig.getMetaCollectionName(), embeddingService.addQuery(embeddingConfig.getMetaCollectionName(),
TextSegmentConvert.convertToEmbedding(metricDataItems)); TextSegmentConvert.convertToEmbedding(metricDataItems));
List<DataItem> dimensionDataItems = dimensionService.getDataEvent().getDataItems(); List<DataItem> dimensionDataItems = dimensionService.getAllDataEvents().getDataItems();
embeddingService.addQuery(embeddingConfig.getMetaCollectionName(), embeddingService.addQuery(embeddingConfig.getMetaCollectionName(),
TextSegmentConvert.convertToEmbedding(dimensionDataItems)); TextSegmentConvert.convertToEmbedding(dimensionDataItems));
} catch (Exception e) { } catch (Exception e) {
View File
@@ -14,7 +14,7 @@ import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums; import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.ItemValueConfig; import com.tencent.supersonic.headless.api.pojo.ItemValueConfig;
import com.tencent.supersonic.headless.api.pojo.request.DictItemReq; import com.tencent.supersonic.headless.api.pojo.request.DictItemReq;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq; import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
@@ -401,7 +401,7 @@ public class DictUtils {
private void fillStructDateBetween(QueryStructReq queryStructReq, ModelResp model, private void fillStructDateBetween(QueryStructReq queryStructReq, ModelResp model,
Integer itemValueDateStart, Integer itemValueDateEnd) { Integer itemValueDateStart, Integer itemValueDateEnd) {
if (Objects.nonNull(model)) { if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension(); List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) { if (!CollectionUtils.isEmpty(timeDims)) {
DateConf dateConf = new DateConf(); DateConf dateConf = new DateConf();
dateConf.setDateMode(DateConf.DateMode.BETWEEN); dateConf.setDateMode(DateConf.DateMode.BETWEEN);
@@ -496,7 +496,7 @@ public class DictUtils {
private boolean partitionedModel(Long modelId) { private boolean partitionedModel(Long modelId) {
ModelResp model = modelService.getModel(modelId); ModelResp model = modelService.getModel(modelId);
if (Objects.nonNull(model)) { if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension(); List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) { if (!CollectionUtils.isEmpty(timeDims)) {
return true; return true;
} }
@@ -507,7 +507,7 @@ public class DictUtils {
private String generateDictDateFilterRecent(DictItemResp dictItemResp) { private String generateDictDateFilterRecent(DictItemResp dictItemResp) {
ModelResp model = modelService.getModel(dictItemResp.getModelId()); ModelResp model = modelService.getModel(dictItemResp.getModelId());
if (Objects.nonNull(model)) { if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension(); List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) { if (!CollectionUtils.isEmpty(timeDims)) {
String dateFormat = timeDims.get(0).getDateFormat(); String dateFormat = timeDims.get(0).getDateFormat();
if (StringUtils.isEmpty(dateFormat)) { if (StringUtils.isEmpty(dateFormat)) {
View File
@@ -105,6 +105,8 @@ public class DimensionConverter {
dimensionResp.setType(getType(dimensionDO.getType())); dimensionResp.setType(getType(dimensionDO.getType()));
dimensionResp.setTypeEnum(TypeEnums.DIMENSION); dimensionResp.setTypeEnum(TypeEnums.DIMENSION);
dimensionResp.setIsTag(dimensionDO.getIsTag()); dimensionResp.setIsTag(dimensionDO.getIsTag());
dimensionResp.setDomainId(modelRespMap
.getOrDefault(dimensionResp.getModelId(), new ModelResp()).getDomainId());
return dimensionResp; return dimensionResp;
} }
View File
@@ -7,7 +7,7 @@ import com.tencent.supersonic.common.pojo.enums.PublishEnum;
import com.tencent.supersonic.common.pojo.enums.StatusEnum; import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums; import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByFieldParams; import com.tencent.supersonic.headless.api.pojo.MetricDefineByFieldParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams; import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMetricParams; import com.tencent.supersonic.headless.api.pojo.MetricDefineByMetricParams;
@@ -82,7 +82,7 @@ public class MetricConverter {
metricResp.setModelName(modelResp.getName()); metricResp.setModelName(modelResp.getName());
metricResp.setModelBizName(modelResp.getBizName()); metricResp.setModelBizName(modelResp.getBizName());
metricResp.setDomainId(modelResp.getDomainId()); metricResp.setDomainId(modelResp.getDomainId());
List<Dim> timeDims = modelResp.getTimeDimension(); List<Dimension> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) { if (CollectionUtils.isNotEmpty(timeDims)) {
metricResp.setContainsPartitionDimensions(true); metricResp.setContainsPartitionDimensions(true);
} }
View File
@@ -8,7 +8,7 @@ import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.ColumnSchema; import com.tencent.supersonic.headless.api.pojo.ColumnSchema;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension; import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.Identify; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure; import com.tencent.supersonic.headless.api.pojo.Measure;
@@ -111,7 +111,7 @@ public class ModelConverter {
return measureResp; return measureResp;
} }
public static DimensionReq convert(Dim dim, ModelDO modelDO) { public static DimensionReq convert(Dimension dim, ModelDO modelDO) {
DimensionReq dimensionReq = new DimensionReq(); DimensionReq dimensionReq = new DimensionReq();
dimensionReq.setName(dim.getName()); dimensionReq.setName(dim.getName());
dimensionReq.setBizName(dim.getBizName()); dimensionReq.setBizName(dim.getBizName());
@@ -129,7 +129,6 @@ public class ModelConverter {
dimensionReq.setType(dim.getType().name()); dimensionReq.setType(dim.getType().name());
dimensionReq dimensionReq
.setDescription(Objects.isNull(dim.getDescription()) ? "" : dim.getDescription()); .setDescription(Objects.isNull(dim.getDescription()) ? "" : dim.getDescription());
dimensionReq.setIsTag(dim.getIsTag());
dimensionReq.setTypeParams(dim.getTypeParams()); dimensionReq.setTypeParams(dim.getTypeParams());
return dimensionReq; return dimensionReq;
} }
@@ -165,8 +164,8 @@ public class ModelConverter {
public static ModelReq convert(ModelSchema modelSchema, ModelBuildReq modelBuildReq, public static ModelReq convert(ModelSchema modelSchema, ModelBuildReq modelBuildReq,
String tableName) { String tableName) {
ModelReq modelReq = new ModelReq(); ModelReq modelReq = new ModelReq();
modelReq.setName(modelSchema.getName()); modelReq.setName(modelBuildReq.getName());
modelReq.setBizName(modelSchema.getBizName()); modelReq.setBizName(modelBuildReq.getBizName());
modelReq.setDatabaseId(modelBuildReq.getDatabaseId()); modelReq.setDatabaseId(modelBuildReq.getDatabaseId());
modelReq.setDomainId(modelBuildReq.getDomainId()); modelReq.setDomainId(modelBuildReq.getDomainId());
ModelDetail modelDetail = new ModelDetail(); ModelDetail modelDetail = new ModelDetail();
@@ -188,7 +187,7 @@ public class ModelConverter {
columnSchema.getAgg().getOperator(), 1); columnSchema.getAgg().getOperator(), 1);
modelDetail.getMeasures().add(measure); modelDetail.getMeasures().add(measure);
} else { } else {
Dim dim = new Dim(columnSchema.getName(), columnSchema.getColumnName(), Dimension dim = new Dimension(columnSchema.getName(), columnSchema.getColumnName(),
DimensionType.valueOf(columnSchema.getFiledType().name()), 1); DimensionType.valueOf(columnSchema.getFiledType().name()), 1);
modelDetail.getDimensions().add(dim); modelDetail.getDimensions().add(dim);
} }
@@ -198,10 +197,12 @@ public class ModelConverter {
} }
private static IdentifyType getIdentifyType(FieldType fieldType) { private static IdentifyType getIdentifyType(FieldType fieldType) {
if (FieldType.foreign_key.equals(fieldType) || FieldType.primary_key.equals(fieldType)) { if (FieldType.primary_key.equals(fieldType)) {
return IdentifyType.primary; return IdentifyType.primary;
} else { } else if (FieldType.foreign_key.equals(fieldType)) {
return IdentifyType.foreign; return IdentifyType.foreign;
} else {
return null;
} }
} }
@@ -214,7 +215,7 @@ public class ModelConverter {
return modelDescs; return modelDescs;
} }
private static boolean isCreateDimension(Dim dim) { private static boolean isCreateDimension(Dimension dim) {
return dim.getIsCreateDimension() == 1 && StringUtils.isNotBlank(dim.getName()); return dim.getIsCreateDimension() == 1 && StringUtils.isNotBlank(dim.getName());
} }
@@ -226,7 +227,7 @@ public class ModelConverter {
return measure.getIsCreateMetric() == 1 && StringUtils.isNotBlank(measure.getName()); return measure.getIsCreateMetric() == 1 && StringUtils.isNotBlank(measure.getName());
} }
public static List<Dim> getDimToCreateDimension(ModelDetail modelDetail) { public static List<Dimension> getDimToCreateDimension(ModelDetail modelDetail) {
if (CollectionUtils.isEmpty(modelDetail.getDimensions())) { if (CollectionUtils.isEmpty(modelDetail.getDimensions())) {
return Lists.newArrayList(); return Lists.newArrayList();
} }
@@ -254,7 +255,7 @@ public class ModelConverter {
List<DimensionReq> dimensionReqs = Lists.newArrayList(); List<DimensionReq> dimensionReqs = Lists.newArrayList();
ModelDetail modelDetail = ModelDetail modelDetail =
JSONObject.parseObject(modelDO.getModelDetail(), ModelDetail.class); JSONObject.parseObject(modelDO.getModelDetail(), ModelDetail.class);
List<Dim> dims = getDimToCreateDimension(modelDetail); List<Dimension> dims = getDimToCreateDimension(modelDetail);
if (!CollectionUtils.isEmpty(dims)) { if (!CollectionUtils.isEmpty(dims)) {
dimensionReqs = dims.stream().filter(dim -> StringUtils.isNotBlank(dim.getName())) dimensionReqs = dims.stream().filter(dim -> StringUtils.isNotBlank(dim.getName()))
.map(dim -> convert(dim, modelDO)).collect(Collectors.toList()); .map(dim -> convert(dim, modelDO)).collect(Collectors.toList());
View File
@@ -40,28 +40,4 @@
<result column="query_opt_mode" property="queryOptMode"/> <result column="query_opt_mode" property="queryOptMode"/>
</resultMap> </resultMap>
<select id="getStatInfo"
resultType="com.tencent.supersonic.headless.api.pojo.QueryStat">
select *
from s2_query_stat_info
<where>
<if test="startTime != null">
and start_time >= #{startTime}
</if>
<if test="modelId != null">
and model_id = #{modelId}
</if>
<if test="modelIds != null and modelIds.size() > 0">
and model_id in
<foreach item="id" collection="modelIds" open="(" separator="," close=")">
#{id}
</foreach>
</if>
<if test="metric != null">
and metrics like concat('%',#{metric},'%')
</if>
</where>
</select>
</mapper> </mapper>
View File
@@ -116,63 +116,6 @@
</foreach> </foreach>
</update> </update>
<select id="query" resultMap="ResultMapWithBLOBs">
select t.*, (case when t1.id is not null then 1 else 0 end) as isTag
from s2_dimension t
left join (
select *
from s2_tag
where type = 'DIMENSION'
) t1 on t.id = t1.item_id
where status != 3
<if test="key != null and key != ''">
and ( t.id like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.biz_name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.alias like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.description like CONCAT('%',#{key , jdbcType=VARCHAR},'%') )
</if>
<if test="id != null">
and t.id like CONCAT('%',#{id , jdbcType=VARCHAR},'%')
</if>
<if test="name != null and name != '' ">
and t.name like CONCAT('%',#{name , jdbcType=VARCHAR},'%')
</if>
<if test="bizName != null and bizName != ''">
and t.biz_name like CONCAT('%',#{bizName , jdbcType=VARCHAR},'%')
</if>
<if test="sensitiveLevel != null">
and t.sensitive_level = #{sensitiveLevel}
</if>
<if test="status != null">
and t.status = #{status}
</if>
<if test="modelIds != null and modelIds.size >0">
and t.model_id in
<foreach collection="modelIds" index="index" item="model" open="(" close=")"
separator=",">
#{model}
</foreach>
</if>
<if test="ids != null and ids.size >0">
and t.id in
<foreach collection="ids" index="index" item="id" open="(" close=")"
separator=",">
#{id}
</foreach>
</if>
<if test="createdBy != null">
and t.created_by = #{createdBy}
</if>
<if test="isTag != null and isTag == 1">
and t1.id is not null
</if>
<if test="isTag != null and isTag == 0">
and t1.id is null
</if>
</select>
<select id="queryDimensions" resultMap="ResultMapWithBLOBs"> <select id="queryDimensions" resultMap="ResultMapWithBLOBs">
select * select *
from s2_dimension from s2_dimension
View File
@@ -127,70 +127,6 @@
</foreach> </foreach>
</update> </update>
<select id="query" resultMap="ResultMapWithBLOBs">
select t.*, (case when t1.id is not null then 1 else 0 end) as isTag
from s2_metric t
left join (
select *
from s2_tag
where type = 'METRIC'
) t1 on t.id = t1.item_id
where t.status != 3
<if test="type != null and type != ''">
and t.type = #{type}
</if>
<if test="key != null and key != ''">
and ( t.id like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.biz_name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.description like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.alias like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.classifications like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.created_by like CONCAT('%',#{key , jdbcType=VARCHAR},'%') )
</if>
<if test="id != null">
and t.id like CONCAT('%',#{id , jdbcType=VARCHAR},'%')
</if>
<if test="name != null and name != '' ">
and t.name like CONCAT('%',#{name , jdbcType=VARCHAR},'%')
</if>
<if test="bizName != null and bizName != ''">
and t.biz_name like CONCAT('%',#{bizName , jdbcType=VARCHAR},'%')
</if>
<if test="sensitiveLevel != null">
and t.sensitive_level = #{sensitiveLevel}
</if>
<if test="status != null">
and t.status = #{status}
</if>
<if test="modelIds != null and modelIds.size >0">
and t.model_id in
<foreach collection="modelIds" index="index" item="model" open="(" close=")"
separator=",">
#{model}
</foreach>
</if>
<if test="ids != null and ids.size >0">
and t.id in
<foreach collection="ids" index="index" item="id" open="(" close=")"
separator=",">
#{id}
</foreach>
</if>
<if test="createdBy != null">
and t.created_by = #{createdBy}
</if>
<if test="isTag != null and isTag == 1">
and t1.id is not null
</if>
<if test="isTag != null and isTag == 0">
and t1.id is null
</if>
<if test="isPublish != null and isPublish == 1">
and (t.created_by = #{userName} or t.is_publish = 1)
</if>
</select>
<select id="queryMetrics" resultMap="ResultMapWithBLOBs"> <select id="queryMetrics" resultMap="ResultMapWithBLOBs">
select * select *
from s2_metric from s2_metric
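Most of the two removed query selects above (dimension and metric mappers) translate one-for-one into wrapper calls of the kind introduced elsewhere in this commit; the only non-obvious piece is the keyword filter, which ORs several LIKE conditions inside a single bracketed group. A minimal sketch of that piece, assuming MyBatis-Plus 3.x and commons-lang3; the ItemDO entity is illustrative, not the project's DimensionDO or MetricDO:

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import org.apache.commons.lang3.StringUtils;

public class KeywordFilterSketch {

    // Stand-in entity; the committed code queries DimensionDO and MetricDO through their mappers.
    public static class ItemDO {
        private String name;
        private String bizName;
        private String description;

        public String getName() { return name; }
        public String getBizName() { return bizName; }
        public String getDescription() { return description; }
    }

    // and(condition, consumer) opens a bracketed group only when a keyword is given, so the
    // ORed LIKEs stay inside "... AND (name LIKE ? OR biz_name LIKE ? OR description LIKE ?)".
    public static LambdaQueryWrapper<ItemDO> keywordFilter(String key) {
        LambdaQueryWrapper<ItemDO> wrapper = new LambdaQueryWrapper<>();
        wrapper.and(StringUtils.isNotBlank(key),
                w -> w.like(ItemDO::getName, key)
                        .or().like(ItemDO::getBizName, key)
                        .or().like(ItemDO::getDescription, key));
        return wrapper;
    }
}

The remaining filters in the removed selects (status, sensitiveLevel, modelIds, ids, createdBy) follow the same eq/in pattern as the getQueryStats example earlier in this commit.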
View File
@@ -5,7 +5,7 @@ import com.tencent.supersonic.auth.api.authentication.service.UserService;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum; import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.StatusEnum; import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams; import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.Identify; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure; import com.tencent.supersonic.headless.api.pojo.Measure;
@@ -95,11 +95,11 @@ class ModelServiceImplTest {
List<Identify> identifiers = new ArrayList<>(); List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name")); identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name"));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
@@ -134,11 +134,11 @@ class ModelServiceImplTest {
identifiers.add(new Identify("用户名_a", IdentifyType.primary.name(), "user_name_a")); identifiers.add(new Identify("用户名_a", IdentifyType.primary.name(), "user_name_a"));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date_a", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("", "imp_date_a", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page_a", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page_a", DimensionType.categorical, 0);
dimension2.setExpr("page_a"); dimension2.setExpr("page_a");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
@@ -169,11 +169,11 @@ class ModelServiceImplTest {
List<Identify> identifiers = new ArrayList<>(); List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name")); identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name"));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
@@ -207,11 +207,11 @@ class ModelServiceImplTest {
identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name")); identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name"));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
@@ -252,11 +252,11 @@ class ModelServiceImplTest {
identifiers.add(new Identify("用户名_a", IdentifyType.primary.name(), "user_name_a")); identifiers.add(new Identify("用户名_a", IdentifyType.primary.name(), "user_name_a"));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date_a", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("", "imp_date_a", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page_a", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page_a", DimensionType.categorical, 0);
dimension2.setExpr("page_a"); dimension2.setExpr("page_a");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
View File
@@ -4,6 +4,7 @@ com.tencent.supersonic.headless.chat.mapper.SchemaMapper=\
com.tencent.supersonic.headless.chat.mapper.EmbeddingMapper, \ com.tencent.supersonic.headless.chat.mapper.EmbeddingMapper, \
com.tencent.supersonic.headless.chat.mapper.KeywordMapper, \ com.tencent.supersonic.headless.chat.mapper.KeywordMapper, \
com.tencent.supersonic.headless.chat.mapper.QueryFilterMapper, \ com.tencent.supersonic.headless.chat.mapper.QueryFilterMapper, \
com.tencent.supersonic.headless.chat.mapper.TimeFieldMapper,\
com.tencent.supersonic.headless.chat.mapper.TermDescMapper com.tencent.supersonic.headless.chat.mapper.TermDescMapper
com.tencent.supersonic.headless.chat.parser.SemanticParser=\ com.tencent.supersonic.headless.chat.parser.SemanticParser=\
@@ -27,9 +28,9 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\ com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\

View File
modelReq.setAdmins(Collections.singletonList("admin")); modelReq.setAdmins(Collections.singletonList("admin"));
modelReq.setAdminOrgs(Collections.emptyList()); modelReq.setAdminOrgs(Collections.emptyList());
ModelDetail modelDetail = new ModelDetail(); ModelDetail modelDetail = new ModelDetail();
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
dimensions.add(new Dim("公司名称", "company_name", DimensionType.categorical, 1)); dimensions.add(new Dimension("公司名称", "company_name", DimensionType.categorical, 1));
dimensions.add(new Dim("总部地点", "headquarter_address", DimensionType.categorical, 1)); dimensions.add(new Dimension("总部地点", "headquarter_address", DimensionType.categorical, 1));
dimensions.add(new Dim("成立时间", "company_established_time", DimensionType.time, 1)); dimensions.add(new Dimension("成立时间", "company_established_time", DimensionType.time, 1));
dimensions.add(new Dim("创始人", "founder", DimensionType.categorical, 1)); dimensions.add(new Dimension("创始人", "founder", DimensionType.categorical, 1));
dimensions.add(new Dim("首席执行官", "ceo", DimensionType.categorical, 1)); dimensions.add(new Dimension("首席执行官", "ceo", DimensionType.categorical, 1));
List<Identify> identifiers = new ArrayList<>(); List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("公司id", IdentifyType.primary.name(), "company_id")); identifiers.add(new Identify("公司id", IdentifyType.primary.name(), "company_id"));
@@ -131,12 +131,13 @@ public class S2CompanyDemo extends S2BaseDemo {
modelReq.setAdmins(Collections.singletonList("admin")); modelReq.setAdmins(Collections.singletonList("admin"));
modelReq.setAdminOrgs(Collections.emptyList()); modelReq.setAdminOrgs(Collections.emptyList());
ModelDetail modelDetail = new ModelDetail(); ModelDetail modelDetail = new ModelDetail();
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
dimensions.add(new Dim("品牌名称", "brand_name", DimensionType.categorical, 1)); dimensions.add(new Dimension("品牌名称", "brand_name", DimensionType.categorical, 1));
dimensions.add(new Dim("品牌成立时间", "brand_established_time", DimensionType.time, 1)); dimensions.add(new Dimension("品牌成立时间", "brand_established_time", DimensionType.time, 1));
dimensions.add(new Dim("法定代表人", "legal_representative", DimensionType.categorical, 1)); dimensions
.add(new Dimension("法定代表人", "legal_representative", DimensionType.categorical, 1));
List<Identify> identifiers = new ArrayList<>(); List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("品牌id", IdentifyType.primary.name(), "brand_id")); identifiers.add(new Identify("品牌id", IdentifyType.primary.name(), "brand_id"));
@@ -168,10 +169,10 @@ public class S2CompanyDemo extends S2BaseDemo {
modelReq.setAdmins(Collections.singletonList("admin")); modelReq.setAdmins(Collections.singletonList("admin"));
modelReq.setAdminOrgs(Collections.emptyList()); modelReq.setAdminOrgs(Collections.emptyList());
ModelDetail modelDetail = new ModelDetail(); ModelDetail modelDetail = new ModelDetail();
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
dimensions.add(new Dim("财年", "year_time", DimensionType.time, 1, "year_time", "yyyy", dimensions.add(new Dimension("财年", "year_time", DimensionType.time, 1, "year_time", "yyyy",
new DimensionTimeTypeParams("false", "year"))); new DimensionTimeTypeParams("false", "year")));
List<Identify> identifiers = new ArrayList<>(); List<Identify> identifiers = new ArrayList<>();
View File
@@ -16,7 +16,7 @@ import com.tencent.supersonic.headless.api.pojo.AggregateTypeDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.DataSetDetail; import com.tencent.supersonic.headless.api.pojo.DataSetDetail;
import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig; import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig;
import com.tencent.supersonic.headless.api.pojo.DetailTypeDefaultConfig; import com.tencent.supersonic.headless.api.pojo.DetailTypeDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure; import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.ModelDetail; import com.tencent.supersonic.headless.api.pojo.ModelDetail;
@@ -101,10 +101,10 @@ public class S2SingerDemo extends S2BaseDemo {
identifiers.add(identify); identifiers.add(identify);
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
dimensions.add(new Dim("活跃区域", "act_area", DimensionType.categorical, 1)); dimensions.add(new Dimension("活跃区域", "act_area", DimensionType.categorical, 1));
dimensions.add(new Dim("代表作", "song_name", DimensionType.categorical, 1)); dimensions.add(new Dimension("代表作", "song_name", DimensionType.categorical, 1));
dimensions.add(new Dim("流派", "genre", DimensionType.categorical, 1)); dimensions.add(new Dimension("流派", "genre", DimensionType.categorical, 1));
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
Measure measure1 = new Measure("播放量", "js_play_cnt", "sum", 1); Measure measure1 = new Measure("播放量", "js_play_cnt", "sum", 1);
View File
@@ -27,7 +27,7 @@ import com.tencent.supersonic.common.util.ChatAppManager;
import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.DataSetDetail; import com.tencent.supersonic.headless.api.pojo.DataSetDetail;
import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig; import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig;
import com.tencent.supersonic.headless.api.pojo.Dim; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams; import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.Field; import com.tencent.supersonic.headless.api.pojo.Field;
import com.tencent.supersonic.headless.api.pojo.FieldParam; import com.tencent.supersonic.headless.api.pojo.FieldParam;
@@ -199,9 +199,9 @@ public class S2VisitsDemo extends S2BaseDemo {
identifiers.add(new Identify("用户", IdentifyType.primary.name(), "user_name", 1)); identifiers.add(new Identify("用户", IdentifyType.primary.name(), "user_name", 1));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
dimensions.add(new Dim("部门", "department", DimensionType.categorical, 1)); dimensions.add(new Dimension("部门", "department", DimensionType.categorical, 1));
// dimensions.add(new Dim("用户", "user_name", DimensionType.categorical, 1)); // dimensions.add(new Dimension("用户", "user_name", DimensionType.categorical, 1));
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
List<Field> fields = Lists.newArrayList(); List<Field> fields = Lists.newArrayList();
fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build()); fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build());
@@ -230,11 +230,11 @@ public class S2VisitsDemo extends S2BaseDemo {
identifiers.add(new Identify("用户名", IdentifyType.foreign.name(), "user_name", 0)); identifiers.add(new Identify("用户名", IdentifyType.foreign.name(), "user_name", 0));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
@@ -274,11 +274,11 @@ public class S2VisitsDemo extends S2BaseDemo {
identifiers.add(new Identify("用户", IdentifyType.foreign.name(), "user_name", 0)); identifiers.add(new Identify("用户", IdentifyType.foreign.name(), "user_name", 0));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("数据日期", "imp_date", DimensionType.partition_time, 1); Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 1);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dim dimension2 = new Dim("页面", "page", DimensionType.categorical, 1); Dimension dimension2 = new Dimension("页面", "page", DimensionType.categorical, 1);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
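For reference, the demo changes above imply the following shape for the renamed POJO. This is a minimal sketch inferred only from how the demos construct it; the real com.tencent.supersonic.headless.api.pojo.Dimension carries more fields, and the meaning of the trailing int argument is an assumption.

    // Sketch of the renamed class as used by the demos above; not the project's full definition.
    import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
    import com.tencent.supersonic.headless.api.pojo.enums.DimensionType; // package assumed

    public class Dimension {
        private String name;               // display name, e.g. "部门"
        private String bizName;            // physical column, e.g. "department"
        private DimensionType type;        // categorical / partition_time / ...
        private Integer isCreate;          // assumed meaning of the trailing int flag (1/0)
        private String expr;
        private DimensionTimeTypeParams typeParams;

        public Dimension(String name, String bizName, DimensionType type, Integer isCreate) {
            this.name = name;
            this.bizName = bizName;
            this.type = type;
            this.isCreate = isCreate;
        }

        public void setExpr(String expr) { this.expr = expr; }
        public void setTypeParams(DimensionTimeTypeParams typeParams) { this.typeParams = typeParams; }
    }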

View File

@@ -4,6 +4,7 @@ com.tencent.supersonic.headless.chat.mapper.SchemaMapper=\
com.tencent.supersonic.headless.chat.mapper.EmbeddingMapper, \ com.tencent.supersonic.headless.chat.mapper.EmbeddingMapper, \
com.tencent.supersonic.headless.chat.mapper.KeywordMapper, \ com.tencent.supersonic.headless.chat.mapper.KeywordMapper, \
com.tencent.supersonic.headless.chat.mapper.QueryFilterMapper, \ com.tencent.supersonic.headless.chat.mapper.QueryFilterMapper, \
com.tencent.supersonic.headless.chat.mapper.TimeFieldMapper,\
com.tencent.supersonic.headless.chat.mapper.TermDescMapper com.tencent.supersonic.headless.chat.mapper.TermDescMapper
com.tencent.supersonic.headless.chat.parser.SemanticParser=\ com.tencent.supersonic.headless.chat.parser.SemanticParser=\
@@ -27,9 +28,9 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\ com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\
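The file above acts as a plain-text component registry: each extension-point interface maps to a comma-separated, ordered list of implementation classes (note that TimeFieldMapper is now registered as a SchemaMapper and MetricRatioConverter now runs after SqlQueryConverter/StructQueryConverter). SuperSonic's actual loader is not part of this diff; below is a minimal sketch of how such a registry could be read, assuming a properties-style file location and that every listed class has a no-arg constructor.

    // Hypothetical loader sketch; the project's real component factory is not shown in this commit.
    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Properties;

    public class ComponentRegistrySketch {
        public static <T> List<T> load(Class<T> extensionPoint) throws Exception {
            Properties props = new Properties();
            try (InputStream in = ComponentRegistrySketch.class.getClassLoader()
                    .getResourceAsStream("META-INF/s2-components.properties")) { // location assumed
                if (in != null) {
                    props.load(in); // backslash line continuations are handled by Properties
                }
            }
            List<T> components = new ArrayList<>();
            String declared = props.getProperty(extensionPoint.getName(), "");
            for (String className : declared.split(",")) {
                String trimmed = className.trim();
                if (trimmed.isEmpty()) {
                    continue;
                }
                // Instantiation order follows the order declared in the file.
                components.add(extensionPoint.cast(
                        Class.forName(trimmed).getDeclaredConstructor().newInstance()));
            }
            return components;
        }
    }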

View File

@@ -0,0 +1,26 @@
spring:
datasource:
url: jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/${DB_NAME}?stringtype=unspecified
username: ${DB_USERNAME}
password: ${DB_PASSWORD}
driver-class-name: org.postgresql.Driver
sql:
init:
enabled: false
mode: always
username: ${DB_USERNAME}
password: ${DB_PASSWORD}
schema-locations: classpath:db/schema-postgres.sql,classpath:db/schema-postgres-demo.sql
data-locations: classpath:db/data-postgres.sql,classpath:db/data-postgres-demo.sql
s2:
embedding:
store:
provider: PGVECTOR
base:
url: ${DB_HOST}
port: ${DB_PORT:5432}
databaseName: ${DB_NAME}
user: ${DB_USERNAME}
password: ${DB_PASSWORD}
dimension: 512
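The s2.embedding.store block above wires a pgvector-backed embedding store against the same Postgres instance, with the 512-dimensional vector space introduced by the "Change default vector dimension to 512" fix. A minimal sketch of how these properties could be bound in Spring is shown below; the class name and constructor-binding style are assumptions for illustration, not the project's actual binding code.

    // Hypothetical binding sketch (assumes Spring Boot 3-style constructor binding for records).
    import org.springframework.boot.context.properties.ConfigurationProperties;

    @ConfigurationProperties(prefix = "s2.embedding.store.base") // prefix taken from the yaml above
    public record EmbeddingStoreBaseProperties(
            String url,           // DB_HOST
            int port,             // DB_PORT, defaults to 5432 in the yaml
            String databaseName,  // DB_NAME
            String user,          // DB_USERNAME
            String password,      // DB_PASSWORD
            int dimension) {      // pgvector dimension, 512 here
    }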

View File

@@ -12,3 +12,15 @@ spring:
password: postgres password: postgres
schema-locations: classpath:db/schema-postgres.sql,classpath:db/schema-postgres-demo.sql schema-locations: classpath:db/schema-postgres.sql,classpath:db/schema-postgres-demo.sql
data-locations: classpath:db/data-postgres.sql,classpath:db/data-postgres-demo.sql data-locations: classpath:db/data-postgres.sql,classpath:db/data-postgres-demo.sql
#s2:
# embedding:
# store:
# provider: PGVECTOR
# base:
# url: 127.0.0.1
# port: 5432
# databaseName: postgres
# user: postgres
# password: postgres
# dimension: 512

View File

@@ -1,6 +0,0 @@
tom _1_2 1
alice _1_2 1
lucy _1_2 1
dean _1_2 1
john _1_2 1
jack _1_2 1

View File

@@ -1,4 +0,0 @@
p1 _3_4 3
p3 _3_4 4
p4 _3_4 4
p5 _3_4 2

View File

@@ -1,3 +0,0 @@
欧美 _4_5 1
港台 _4_5 3
内地 _4_5 2

View File

@@ -1,6 +0,0 @@
美人鱼 _4_6 1
青花瓷 _4_6 1
Love#Story _4_6 1
爱情转移 _4_6 1
人间烟火 _4_6 1
光的方向 _4_6 1

View File

@@ -1,6 +0,0 @@
张碧晨 _4_8 1
周杰伦 _4_8 1
Taylor#Swift _4_8 1
程响 _4_8 1
林俊杰 _4_8 1
陈奕迅 _4_8 1

View File

@@ -1,10 +1,10 @@
-- S2VisitsDemo -- S2VisitsDemo
MERGE into s2_user_department (user_name, department) values ('jack','HR'); INSERT INTO s2_user_department (user_name, department) values ('jack','HR');
MERGE into s2_user_department (user_name, department) values ('tom','sales'); INSERT INTO s2_user_department (user_name, department) values ('tom','sales');
MERGE into s2_user_department (user_name, department) values ('lucy','marketing'); INSERT INTO s2_user_department (user_name, department) values ('lucy','marketing');
MERGE into s2_user_department (user_name, department) values ('john','strategy'); INSERT INTO s2_user_department (user_name, department) values ('john','strategy');
MERGE into s2_user_department (user_name, department) values ('alice','sales'); INSERT INTO s2_user_department (user_name, department) values ('alice','sales');
MERGE into s2_user_department (user_name, department) values ('dean','marketing'); INSERT INTO s2_user_department (user_name, department) values ('dean','marketing');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1'); INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'jack', 'p1'); INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'jack', 'p1');
@@ -1020,61 +1020,61 @@ INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (
INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 8 DAY), 'lucy', '0.039935860913407284', 'p2'); INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 8 DAY), 'lucy', '0.039935860913407284', 'p2');
-- S2ArtistDemo -- S2ArtistDemo
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000); VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000); VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000); VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000); VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000); VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
MERGE into genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
-- S2CompanyDemo -- S2CompanyDemo
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000); INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473); INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503); INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000); INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000); INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000); INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_133',12300000000, 2300000000,30,30); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_133',12300000000, 2300000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_134',12400000000, 2400000000,10,10); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_134',12400000000, 2400000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_135',12500000000, 2500000000,30,30); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_135',12500000000, 2500000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_136',12600000000, 2600000000,40,40); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_136',12600000000, 2600000000,40,40);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_137',12700000000, 2700000000,50,50); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_137',12700000000, 2700000000,50,50);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_138',12800000000, 2800000000,20,10); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_138',12800000000, 2800000000,20,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_139',12900000000, 2900000000,60,70); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_139',12900000000, 2900000000,60,70);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_140',13000000000, 3000000000,80,100); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_140',13000000000, 3000000000,80,100);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_131',13100000000,3100000000, 10,10); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_131',13100000000,3100000000, 10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_132',13200000000, 3200000000,20,20); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_132',13200000000, 3200000000,20,20);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_133',13300000000, 3300000000,30,30); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_133',13300000000, 3300000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_134',13400000000, 3400000000,10,10); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_134',13400000000, 3400000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_135',13500000000, 3500000000,30,30); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_135',13500000000, 3500000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_136',13600000000, 3600000000,40,40); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_136',13600000000, 3600000000,40,40);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_137',13700000000, 3700000000,50,50); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_137',13700000000, 3700000000,50,50);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_138',13800000000, 3800000000,20,10); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_138',13800000000, 3800000000,20,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_139',13900000000, 3900000000,60,70); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_139',13900000000, 3900000000,60,70);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_140',14000000000, 4000000000,80,100); INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_140',14000000000, 4000000000,80,100);

View File

@@ -1,10 +1,10 @@
-- S2VisitsDemo -- S2VisitsDemo
MERGE into s2_user_department (user_name, department) values ('jack','HR'); INSERT into s2_user_department (user_name, department) values ('jack','HR');
MERGE into s2_user_department (user_name, department) values ('tom','sales'); INSERT into s2_user_department (user_name, department) values ('tom','sales');
MERGE into s2_user_department (user_name, department) values ('lucy','marketing'); INSERT into s2_user_department (user_name, department) values ('lucy','marketing');
MERGE into s2_user_department (user_name, department) values ('john','strategy'); INSERT into s2_user_department (user_name, department) values ('john','strategy');
MERGE into s2_user_department (user_name, department) values ('alice','sales'); INSERT into s2_user_department (user_name, department) values ('alice','sales');
MERGE into s2_user_department (user_name, department) values ('dean','marketing'); INSERT into s2_user_department (user_name, department) values ('dean','marketing');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE, 'lucy', 'p1'); INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE, 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE, 'jack', 'p1'); INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE, 'jack', 'p1');
@@ -1016,43 +1016,43 @@ INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (
INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (CURRENT_DATE - INTERVAL '8 DAY', 'lucy', '0.039935860913407284', 'p2'); INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (CURRENT_DATE - INTERVAL '8 DAY', 'lucy', '0.039935860913407284', 'p2');
-- S2ArtistDemo -- S2ArtistDemo
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000); VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000); VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000); VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000); VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000); VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国'); INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
-- S2CompanyDemo -- S2CompanyDemo
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000); INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473); INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503); INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000); INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000); INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000); INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10); insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20); insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20);

View File

@@ -73,10 +73,10 @@ CREATE TABLE IF NOT EXISTS s2_chat_memory (
agent_id INTEGER, agent_id INTEGER,
db_schema TEXT, db_schema TEXT,
s2_sql TEXT, s2_sql TEXT,
status char(10), status varchar(20),
llm_review char(10), llm_review varchar(20),
llm_comment TEXT, llm_comment TEXT,
human_review char(10), human_review varchar(20),
human_comment TEXT, human_comment TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
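The switch from char(10) to varchar(20) matters on PostgreSQL because char columns are blank-padded to their declared length, so values read back carry trailing spaces and fail plain string comparisons in Java. A tiny illustration of the failure mode (the status values are hypothetical):

    // Illustration only: char(10) hands back "PENDING" padded to ten characters.
    public class CharPaddingDemo {
        public static void main(String[] args) {
            String fromCharColumn = "PENDING   ";   // blank-padded by char(10)
            String fromVarcharColumn = "PENDING";   // stored as-is by varchar(20)
            System.out.println(fromCharColumn.equals("PENDING"));    // false -> subtle bugs
            System.out.println(fromVarcharColumn.equals("PENDING")); // true
        }
    }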

View File

@@ -42,7 +42,7 @@ public class SemanticModellerTest extends BaseTest {
Assertions.assertEquals(2, userModelSchema.getColumnSchemas().size()); Assertions.assertEquals(2, userModelSchema.getColumnSchemas().size());
Assertions.assertEquals(FieldType.primary_key, Assertions.assertEquals(FieldType.primary_key,
userModelSchema.getColumnByName("user_name").getFiledType()); userModelSchema.getColumnByName("user_name").getFiledType());
Assertions.assertEquals(FieldType.dimension, Assertions.assertEquals(FieldType.categorical,
userModelSchema.getColumnByName("department").getFiledType()); userModelSchema.getColumnByName("department").getFiledType());
ModelSchema stayTimeModelSchema = modelSchemaMap.get("s2_stay_time_statis"); ModelSchema stayTimeModelSchema = modelSchemaMap.get("s2_stay_time_statis");
@@ -51,7 +51,7 @@ public class SemanticModellerTest extends BaseTest {
stayTimeModelSchema.getColumnByName("user_name").getFiledType()); stayTimeModelSchema.getColumnByName("user_name").getFiledType());
Assertions.assertEquals(FieldType.partition_time, Assertions.assertEquals(FieldType.partition_time,
stayTimeModelSchema.getColumnByName("imp_date").getFiledType()); stayTimeModelSchema.getColumnByName("imp_date").getFiledType());
Assertions.assertEquals(FieldType.dimension, Assertions.assertEquals(FieldType.categorical,
stayTimeModelSchema.getColumnByName("page").getFiledType()); stayTimeModelSchema.getColumnByName("page").getFiledType());
Assertions.assertEquals(FieldType.measure, Assertions.assertEquals(FieldType.measure,
stayTimeModelSchema.getColumnByName("stay_hours").getFiledType()); stayTimeModelSchema.getColumnByName("stay_hours").getFiledType());
@@ -75,9 +75,9 @@ public class SemanticModellerTest extends BaseTest {
Assertions.assertEquals(5, pvModelSchema.getColumnSchemas().size()); Assertions.assertEquals(5, pvModelSchema.getColumnSchemas().size());
Assertions.assertEquals(FieldType.partition_time, Assertions.assertEquals(FieldType.partition_time,
pvModelSchema.getColumnByName("imp_date").getFiledType()); pvModelSchema.getColumnByName("imp_date").getFiledType());
Assertions.assertEquals(FieldType.dimension, Assertions.assertEquals(FieldType.categorical,
pvModelSchema.getColumnByName("user_name").getFiledType()); pvModelSchema.getColumnByName("user_name").getFiledType());
Assertions.assertEquals(FieldType.dimension, Assertions.assertEquals(FieldType.categorical,
pvModelSchema.getColumnByName("page").getFiledType()); pvModelSchema.getColumnByName("page").getFiledType());
Assertions.assertEquals(FieldType.measure, Assertions.assertEquals(FieldType.measure,
pvModelSchema.getColumnByName("pv").getFiledType()); pvModelSchema.getColumnByName("pv").getFiledType());
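The updated assertions imply that FieldType no longer exposes a generic dimension member and instead marks non-key, non-measure columns as categorical. An inferred sketch of the enum follows; only the values exercised by this test are certain, anything beyond them is an assumption.

    // Inferred from the test assertions above; the real enum may declare additional members.
    public enum FieldType {
        primary_key,
        partition_time,
        categorical,   // replaces the former generic "dimension" value asserted by the old test
        measure
    }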

View File

@@ -1,2 +0,0 @@
[InternetShortcut]
URL=https://github.com/hankcs/HanLP/

View File

@@ -1,3 +0,0 @@
龚 nr 1
龛 ng 1
龛影 n 1

View File

@@ -1,4 +0,0 @@
买@水果 1
然后@来 1
我@遗忘 10
遗忘@我 10

View File

@@ -1,8 +0,0 @@
阿里云 _10_20 5
天猫 _10_20 5
腾讯游戏 _10_20 5
度小满 _10_20 5
京东金融 _10_20 5

View File

@@ -1,8 +0,0 @@
张勇 _10_22 5
马化腾 _10_22 5
朱光 _10_22 5
刘强东 _10_22 5

View File

@@ -1,5 +0,0 @@
hr _1_1 876
sales _1_1 872
marketing _1_1 310
strategy _1_1 360
sales _1_1 500

Some files were not shown because too many files have changed in this diff.