[improvement]Use QueryWrapper in place of hard-coded SQLs (#1944)
Some checks are pending
supersonic CentOS CI / build (11) (push) Waiting to run
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic CentOS CI / build (8) (push) Waiting to run
supersonic mac CI / build (11) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic mac CI / build (8) (push) Waiting to run
supersonic ubuntu CI / build (11) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (8) (push) Waiting to run
supersonic windows CI / build (11) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
supersonic windows CI / build (8) (push) Waiting to run

* [improvement][launcher]Use API to get element ID, avoiding hard-coding.

* [fix][launcher]Fix mysql scripts.

* [improvement][launcher]Support DuckDB database and refactor translator code structure.

* [improvement][headless-fe] Revamped the interaction for semantic modeling routing and successfully implemented the switching between dimension and dataset management.

* [improvement][Headless] Add table ddl in Dbschema

* [improvement][Headless] Add get database by type

* [improvement][Headless] Supports automatic batch creation of models based on db table names.

* [improvement][Headless] Supports getting domain by bizName

* [improvement][launcher]Refactor unit tests and demo data.

* [fix][launcher]Change default vector dimension to 512.

* [improvement](Dict) add dimValueAliasMap info for KnowledgeBaseService

* [improvement][headless]Use QueryWrapper to replace hard-code SQL in mapper xml.

* [improvement][chat]Introduce ChatMemory to delegate ChatMemoryDO.

* [fix][common]Fix embedding store sys configs.

* [fix][common]Fix postgres schema, using varchar instead of char.

* [improvement][launcher]Change supersonic docker deployment from mysql to postgres.

* [Fix][launcher]Fix a number of issues related to semantic modeling.

* [Fix][headless]Fix the evaluation logic of agg type.

* [fix][assembly]Fix Dockerfile and add docker compose run script.

* [fix][chat]Fix "multiple assignments to same column 'similar_queries'".

* [improvement][headless]Use LambdaQueryWrapper to avoid hard-coded column names.

* [improvement][headless]Refactor headless infra to support advanced semantic modelling.

* [improvement][headless]Change class name `Dim` to `Dimension`.

* [improvement][chat]Introduce `TimeFieldMapper` to always map time field.

* [fix][headless]Remove unnecessary dimension existence check.

* [fix][chat]Fix adjusted filters don't take effect.

---------
This commit is contained in:
Jun Zhang
2024-12-08 13:32:29 +08:00
committed by GitHub
parent 0fc29304a8
commit e55f43c737
120 changed files with 844 additions and 5810 deletions

View File

@@ -51,7 +51,7 @@ public class UserRepositoryImpl implements UserRepository {
@Override
public List<UserTokenDO> getUserTokenListByName(String userName) {
QueryWrapper<UserTokenDO> queryWrapper = new QueryWrapper<>();
queryWrapper.eq("user_name", userName);
queryWrapper.lambda().eq(UserTokenDO::getUserName, userName);
return userTokenDOMapper.selectList(queryWrapper);
}
@@ -68,7 +68,7 @@ public class UserRepositoryImpl implements UserRepository {
@Override
public void deleteUserTokenByName(String userName) {
QueryWrapper<UserTokenDO> queryWrapper = new QueryWrapper<>();
queryWrapper.eq("user_name", userName);
queryWrapper.lambda().eq(UserTokenDO::getUserName, userName);
userTokenDOMapper.delete(queryWrapper);
}

View File

@@ -4,9 +4,11 @@ import javax.validation.constraints.NotNull;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import lombok.Builder;
import lombok.Data;
@Data
@Builder
public class ChatMemoryUpdateReq {
@NotNull(message = "id不可为空")

View File

@@ -2,8 +2,8 @@ package com.tencent.supersonic.chat.server.executor;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import com.tencent.supersonic.chat.api.pojo.response.QueryResult;
import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
import com.tencent.supersonic.chat.server.pojo.ChatContext;
import com.tencent.supersonic.chat.server.pojo.ChatMemory;
import com.tencent.supersonic.chat.server.pojo.ExecuteContext;
import com.tencent.supersonic.chat.server.service.ChatContextService;
import com.tencent.supersonic.chat.server.service.MemoryService;
@@ -44,7 +44,7 @@ public class SqlExecutor implements ChatQueryExecutor {
Text2SQLExemplar.class);
MemoryService memoryService = ContextUtils.getBean(MemoryService.class);
memoryService.createMemory(ChatMemoryDO.builder()
memoryService.createMemory(ChatMemory.builder()
.agentId(executeContext.getAgent().getId()).status(MemoryStatus.PENDING)
.question(exemplar.getQuestion()).sideInfo(exemplar.getSideInfo())
.dbSchema(exemplar.getDbSchema()).s2sql(exemplar.getSql())

View File

@@ -1,9 +1,10 @@
package com.tencent.supersonic.chat.server.memory;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter;
import com.tencent.supersonic.chat.server.agent.Agent;
import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
import com.tencent.supersonic.chat.server.pojo.ChatMemory;
import com.tencent.supersonic.chat.server.service.AgentService;
import com.tencent.supersonic.chat.server.service.MemoryService;
import com.tencent.supersonic.common.pojo.ChatApp;
@@ -66,7 +67,7 @@ public class MemoryReviewTask {
}
ChatMemoryFilter chatMemoryFilter =
ChatMemoryFilter.builder().agentId(agent.getId()).build();
memoryService.getMemories(chatMemoryFilter).stream().forEach(memory -> {
memoryService.getMemories(chatMemoryFilter).forEach(memory -> {
try {
processMemory(memory, agent);
} catch (Exception e) {
@@ -77,23 +78,19 @@ public class MemoryReviewTask {
}
}
private void processMemory(ChatMemoryDO m, Agent agent) {
private void processMemory(ChatMemory m, Agent agent) {
if (Objects.isNull(agent)) {
log.warn("Agent id {} not found or memory review disabled", m.getAgentId());
return;
}
ChatApp chatApp = agent.getChatAppConfig().get(APP_KEY);
if (Objects.isNull(chatApp) || !chatApp.isEnable()) {
// if either LLM or human has reviewed, just return
if (Objects.nonNull(m.getLlmReviewRet()) || Objects.nonNull(m.getHumanReviewRet())) {
return;
}
// 如果大模型已经评估过,则不再评估
if (Objects.nonNull(m.getLlmReviewRet())) {
// directly enable memory if the LLM determines it positive
if (MemoryReviewResult.POSITIVE.equals(m.getLlmReviewRet())) {
memoryService.enableMemory(m);
}
ChatApp chatApp = agent.getChatAppConfig().get(APP_KEY);
if (Objects.isNull(chatApp) || !chatApp.isEnable()) {
return;
}
@@ -112,19 +109,19 @@ public class MemoryReviewTask {
}
}
private String createPromptString(ChatMemoryDO m, String promptTemplate) {
private String createPromptString(ChatMemory m, String promptTemplate) {
return String.format(promptTemplate, m.getQuestion(), m.getDbSchema(), m.getSideInfo(),
m.getS2sql());
}
private void processResponse(String response, ChatMemoryDO m) {
private void processResponse(String response, ChatMemory m) {
Matcher matcher = OUTPUT_PATTERN.matcher(response);
if (matcher.find()) {
m.setLlmReviewRet(MemoryReviewResult.getMemoryReviewResult(matcher.group(1)));
m.setLlmReviewCmt(matcher.group(2));
// directly enable memory if the LLM determines it positive
if (MemoryReviewResult.POSITIVE.equals(m.getLlmReviewRet())) {
memoryService.enableMemory(m);
m.setStatus(MemoryStatus.ENABLED);
}
memoryService.updateMemory(m);
}

View File

@@ -91,6 +91,7 @@ public class NL2SQLParser implements ChatQueryParser {
// mapModes
Set<Long> requestedDatasets = queryNLReq.getDataSetIds();
List<SemanticParseInfo> candidateParses = Lists.newArrayList();
StringBuilder errMsg = new StringBuilder();
for (Long datasetId : requestedDatasets) {
queryNLReq.setDataSetIds(Collections.singleton(datasetId));
ChatParseResp parseResp = new ChatParseResp(parseContext.getRequest().getQueryId());
@@ -104,6 +105,7 @@ public class NL2SQLParser implements ChatQueryParser {
doParse(queryNLReq, parseResp);
}
if (parseResp.getSelectedParses().isEmpty()) {
errMsg.append(parseResp.getErrorMsg());
continue;
}
// for one dataset select the top 1 parse after sorting
@@ -116,6 +118,10 @@ public class NL2SQLParser implements ChatQueryParser {
SemanticParseInfo.sort(candidateParses);
parseContext.getResponse().setSelectedParses(
candidateParses.subList(0, Math.min(parserShowCount, candidateParses.size())));
if (parseContext.getResponse().getSelectedParses().isEmpty()) {
parseContext.getResponse().setState(ParseResp.ParseState.FAILED);
parseContext.getResponse().setErrorMsg(errMsg.toString());
}
}
// next go with llm-based parsers unless LLM is disabled or use feedback is needed.

View File

@@ -4,17 +4,17 @@ import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.ToString;
import lombok.NoArgsConstructor;
import java.util.Date;
@Data
@Builder
@ToString
@NoArgsConstructor
@AllArgsConstructor
@TableName("s2_chat_memory")
public class ChatMemoryDO {
@TableId(type = IdType.AUTO)
@@ -36,16 +36,16 @@ public class ChatMemoryDO {
private String s2sql;
@TableField("status")
private MemoryStatus status;
private String status;
@TableField("llm_review")
private MemoryReviewResult llmReviewRet;
private String llmReviewRet;
@TableField("llm_comment")
private String llmReviewCmt;
@TableField("human_review")
private MemoryReviewResult humanReviewRet;
private String humanReviewRet;
@TableField("human_comment")
private String humanReviewCmt;

View File

@@ -20,7 +20,6 @@ import com.tencent.supersonic.chat.server.persistence.repository.ChatQueryReposi
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.common.util.PageUtils;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import com.tencent.supersonic.headless.api.pojo.response.ParseTimeCostResp;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

View File

@@ -0,0 +1,48 @@
package com.tencent.supersonic.chat.server.pojo;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.util.Date;
/**
 * Domain-level representation of a single chat memory entry (question, schema
 * context, generated S2SQL and review results).
 *
 * Unlike the persistence object {@code ChatMemoryDO}, which stores status and
 * review results as plain strings, this class carries the typed enums
 * {@code MemoryStatus} and {@code MemoryReviewResult}; conversion between the
 * two happens in {@code MemoryServiceImpl#getMemory}/{@code getMemoryDO}.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@ToString
public class ChatMemory {
// Primary key of the underlying memory record.
private Long id;
// Agent this memory belongs to.
private Integer agentId;
// The user question this memory was captured from.
private String question;
// Auxiliary context captured alongside the question (see Text2SQLExemplar).
private String sideInfo;
// DB schema snippet used when generating the SQL.
private String dbSchema;
// The semantic SQL (S2SQL) associated with the question.
private String s2sql;
// Lifecycle status, e.g. PENDING/ENABLED/DISABLED.
private MemoryStatus status;
// LLM review verdict; null until the LLM has reviewed this memory.
private MemoryReviewResult llmReviewRet;
// Free-text comment produced by the LLM review.
private String llmReviewCmt;
// Human review verdict; null until a human has reviewed this memory.
private MemoryReviewResult humanReviewRet;
// Free-text comment from the human reviewer.
private String humanReviewCmt;
// Audit fields: creator/updater identity and timestamps.
private String createdBy;
private Date createdAt;
private String updatedBy;
private Date updatedAt;
}

View File

@@ -56,8 +56,7 @@ public class QueryRecommendProcessor implements ParseResultProcessor {
private void updateChatQuery(ChatQueryDO chatQueryDO) {
ChatQueryRepository chatQueryRepository = ContextUtils.getBean(ChatQueryRepository.class);
UpdateWrapper<ChatQueryDO> updateWrapper = new UpdateWrapper<>();
updateWrapper.eq("question_id", chatQueryDO.getQuestionId());
updateWrapper.set("similar_queries", chatQueryDO.getSimilarQueries());
updateWrapper.lambda().eq(ChatQueryDO::getQuestionId, chatQueryDO.getQuestionId());
chatQueryRepository.updateChatQuery(chatQueryDO, updateWrapper);
}
}

View File

@@ -9,7 +9,7 @@ import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryCreateReq;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryUpdateReq;
import com.tencent.supersonic.chat.api.pojo.request.PageMemoryReq;
import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
import com.tencent.supersonic.chat.server.pojo.ChatMemory;
import com.tencent.supersonic.chat.server.service.MemoryService;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq;
@@ -32,7 +32,7 @@ public class MemoryController {
public Boolean createMemory(@RequestBody ChatMemoryCreateReq chatMemoryCreateReq,
HttpServletRequest request, HttpServletResponse response) {
User user = UserHolder.findUser(request, response);
memoryService.createMemory(ChatMemoryDO.builder().agentId(chatMemoryCreateReq.getAgentId())
memoryService.createMemory(ChatMemory.builder().agentId(chatMemoryCreateReq.getAgentId())
.s2sql(chatMemoryCreateReq.getS2sql()).question(chatMemoryCreateReq.getQuestion())
.dbSchema(chatMemoryCreateReq.getDbSchema()).status(chatMemoryCreateReq.getStatus())
.humanReviewRet(MemoryReviewResult.POSITIVE).createdBy(user.getName())
@@ -49,7 +49,7 @@ public class MemoryController {
}
@RequestMapping("/pageMemories")
public PageInfo<ChatMemoryDO> pageMemories(@RequestBody PageMemoryReq pageMemoryReq) {
public PageInfo<ChatMemory> pageMemories(@RequestBody PageMemoryReq pageMemoryReq) {
return memoryService.pageMemories(pageMemoryReq);
}

View File

@@ -4,27 +4,22 @@ import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryUpdateReq;
import com.tencent.supersonic.chat.api.pojo.request.PageMemoryReq;
import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
import com.tencent.supersonic.chat.server.pojo.ChatMemory;
import com.tencent.supersonic.common.pojo.User;
import java.util.List;
public interface MemoryService {
void createMemory(ChatMemoryDO memory);
void createMemory(ChatMemory memory);
void updateMemory(ChatMemoryUpdateReq chatMemoryUpdateReq, User user);
void updateMemory(ChatMemoryDO memory);
void enableMemory(ChatMemoryDO memory);
void disableMemory(ChatMemoryDO memory);
void updateMemory(ChatMemory memory);
void batchDelete(List<Long> ids);
PageInfo<ChatMemoryDO> pageMemories(PageMemoryReq pageMemoryReq);
PageInfo<ChatMemory> pageMemories(PageMemoryReq pageMemoryReq);
List<ChatMemoryDO> getMemories(ChatMemoryFilter chatMemoryFilter);
List<ChatMemory> getMemories(ChatMemoryFilter chatMemoryFilter);
List<ChatMemoryDO> getMemoriesForLlmReview();
}

View File

@@ -6,8 +6,8 @@ import com.tencent.supersonic.chat.api.pojo.request.ChatParseReq;
import com.tencent.supersonic.chat.server.agent.Agent;
import com.tencent.supersonic.chat.server.agent.VisualConfig;
import com.tencent.supersonic.chat.server.persistence.dataobject.AgentDO;
import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
import com.tencent.supersonic.chat.server.persistence.mapper.AgentDOMapper;
import com.tencent.supersonic.chat.server.pojo.ChatMemory;
import com.tencent.supersonic.chat.server.service.AgentService;
import com.tencent.supersonic.chat.server.service.ChatQueryService;
import com.tencent.supersonic.chat.server.service.MemoryService;
@@ -121,7 +121,7 @@ public class AgentServiceImpl extends ServiceImpl<AgentDOMapper, AgentDO> implem
ChatMemoryFilter chatMemoryFilter =
ChatMemoryFilter.builder().agentId(agent.getId()).questions(examples).build();
List<String> memoriesExisted = memoryService.getMemories(chatMemoryFilter).stream()
.map(ChatMemoryDO::getQuestion).collect(Collectors.toList());
.map(ChatMemory::getQuestion).collect(Collectors.toList());
for (String example : examples) {
if (memoriesExisted.contains(example)) {
continue;

View File

@@ -18,11 +18,7 @@ import com.tencent.supersonic.chat.server.service.ChatManageService;
import com.tencent.supersonic.chat.server.service.ChatQueryService;
import com.tencent.supersonic.chat.server.util.ComponentFactory;
import com.tencent.supersonic.chat.server.util.QueryReqConverter;
import com.tencent.supersonic.common.jsqlparser.FieldExpression;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.jsqlparser.*;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.util.DateUtils;
@@ -48,11 +44,7 @@ import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.LongValue;
import net.sf.jsqlparser.expression.StringValue;
import net.sf.jsqlparser.expression.operators.relational.ComparisonOperator;
import net.sf.jsqlparser.expression.operators.relational.GreaterThanEquals;
import net.sf.jsqlparser.expression.operators.relational.InExpression;
import net.sf.jsqlparser.expression.operators.relational.MinorThanEquals;
import net.sf.jsqlparser.expression.operators.relational.ParenthesedExpressionList;
import net.sf.jsqlparser.expression.operators.relational.*;
import net.sf.jsqlparser.schema.Column;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
@@ -60,14 +52,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
@@ -210,20 +195,22 @@ public class ChatQueryServiceImpl implements ChatQueryService {
private void handleLLMQueryMode(ChatQueryDataReq chatQueryDataReq, SemanticQuery semanticQuery,
DataSetSchema dataSetSchema, User user) throws Exception {
SemanticParseInfo parseInfo = semanticQuery.getParseInfo();
List<String> fields = getFieldsFromSql(parseInfo);
if (checkMetricReplace(fields, chatQueryDataReq.getMetrics())) {
log.info("llm begin replace metrics!");
String rebuiltS2SQL;
if (checkMetricReplace(chatQueryDataReq, parseInfo)) {
log.info("rebuild S2SQL with adjusted metrics!");
SchemaElement metricToReplace = chatQueryDataReq.getMetrics().iterator().next();
replaceMetrics(parseInfo, metricToReplace);
rebuiltS2SQL = replaceMetrics(parseInfo, metricToReplace);
} else {
log.info("llm begin revise filters!");
String correctorSql = reviseCorrectS2SQL(chatQueryDataReq, parseInfo, dataSetSchema);
parseInfo.getSqlInfo().setCorrectedS2SQL(correctorSql);
semanticQuery.setParseInfo(parseInfo);
SemanticQueryReq semanticQueryReq = semanticQuery.buildSemanticQueryReq();
SemanticTranslateResp explain = semanticLayerService.translate(semanticQueryReq, user);
parseInfo.getSqlInfo().setQuerySQL(explain.getQuerySQL());
log.info("rebuild S2SQL with adjusted filters!");
rebuiltS2SQL = replaceFilters(chatQueryDataReq, parseInfo, dataSetSchema);
}
// reset SqlInfo and request re-translation
parseInfo.getSqlInfo().setCorrectedS2SQL(rebuiltS2SQL);
parseInfo.getSqlInfo().setParsedS2SQL(rebuiltS2SQL);
parseInfo.getSqlInfo().setQuerySQL(null);
SemanticQueryReq semanticQueryReq = semanticQuery.buildSemanticQueryReq();
SemanticTranslateResp explain = semanticLayerService.translate(semanticQueryReq, user);
parseInfo.getSqlInfo().setQuerySQL(explain.getQuerySQL());
}
private void handleRuleQueryMode(SemanticQuery semanticQuery, DataSetSchema dataSetSchema,
@@ -243,7 +230,9 @@ public class ChatQueryServiceImpl implements ChatQueryService {
return queryResult;
}
private boolean checkMetricReplace(List<String> oriFields, Set<SchemaElement> metrics) {
private boolean checkMetricReplace(ChatQueryDataReq chatQueryDataReq, SemanticParseInfo parseInfo) {
List<String> oriFields = getFieldsFromSql(parseInfo);
Set<SchemaElement> metrics = chatQueryDataReq.getMetrics();
if (CollectionUtils.isEmpty(oriFields) || CollectionUtils.isEmpty(metrics)) {
return false;
}
@@ -252,8 +241,8 @@ public class ChatQueryServiceImpl implements ChatQueryService {
return !oriFields.containsAll(metricNames);
}
private String reviseCorrectS2SQL(ChatQueryDataReq queryData, SemanticParseInfo parseInfo,
DataSetSchema dataSetSchema) {
private String replaceFilters(ChatQueryDataReq queryData, SemanticParseInfo parseInfo,
DataSetSchema dataSetSchema) {
String correctorSql = parseInfo.getSqlInfo().getCorrectedS2SQL();
log.info("correctorSql before replacing:{}", correctorSql);
// get where filter and having filter
@@ -290,7 +279,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
return correctorSql;
}
private void replaceMetrics(SemanticParseInfo parseInfo, SchemaElement metric) {
private String replaceMetrics(SemanticParseInfo parseInfo, SchemaElement metric) {
List<String> oriMetrics = parseInfo.getMetrics().stream().map(SchemaElement::getName)
.collect(Collectors.toList());
String correctorSql = parseInfo.getSqlInfo().getCorrectedS2SQL();
@@ -302,7 +291,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
correctorSql = SqlReplaceHelper.replaceAggFields(correctorSql, fieldMap);
}
log.info("after replaceMetrics:{}", correctorSql);
parseInfo.getSqlInfo().setCorrectedS2SQL(correctorSql);
return correctorSql;
}
private QueryResult doExecution(SemanticQueryReq semanticQueryReq, String queryMode, User user)
@@ -477,6 +466,9 @@ public class ChatQueryServiceImpl implements ChatQueryService {
}
private void mergeParseInfo(SemanticParseInfo parseInfo, ChatQueryDataReq queryData) {
if (Objects.nonNull(queryData.getDateInfo())) {
parseInfo.setDateInfo(queryData.getDateInfo());
}
if (LLMSqlQuery.QUERY_MODE.equals(parseInfo.getQueryMode())) {
return;
}
@@ -492,9 +484,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
if (!CollectionUtils.isEmpty(queryData.getMetricFilters())) {
parseInfo.setMetricFilters(queryData.getMetricFilters());
}
if (Objects.nonNull(queryData.getDateInfo())) {
parseInfo.setDateInfo(queryData.getDateInfo());
}
parseInfo.setSqlInfo(new SqlInfo());
}

View File

@@ -3,12 +3,14 @@ package com.tencent.supersonic.chat.server.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryReviewResult;
import com.tencent.supersonic.chat.api.pojo.enums.MemoryStatus;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryFilter;
import com.tencent.supersonic.chat.api.pojo.request.ChatMemoryUpdateReq;
import com.tencent.supersonic.chat.api.pojo.request.PageMemoryReq;
import com.tencent.supersonic.chat.server.persistence.dataobject.ChatMemoryDO;
import com.tencent.supersonic.chat.server.persistence.repository.ChatMemoryRepository;
import com.tencent.supersonic.chat.server.pojo.ChatMemory;
import com.tencent.supersonic.chat.server.service.MemoryService;
import com.tencent.supersonic.common.config.EmbeddingConfig;
import com.tencent.supersonic.common.pojo.Text2SQLExemplar;
@@ -16,12 +18,15 @@ import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.service.ExemplarService;
import com.tencent.supersonic.common.util.BeanMapper;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Service
public class MemoryServiceImpl implements MemoryService {
@@ -36,20 +41,22 @@ public class MemoryServiceImpl implements MemoryService {
private EmbeddingConfig embeddingConfig;
@Override
public void createMemory(ChatMemoryDO memory) {
public void createMemory(ChatMemory memory) {
// if an existing enabled memory has the same question, just skip
List<ChatMemoryDO> memories =
List<ChatMemory> memories =
getMemories(ChatMemoryFilter.builder().agentId(memory.getAgentId())
.question(memory.getQuestion()).status(MemoryStatus.ENABLED).build());
if (memories.size() == 0) {
chatMemoryRepository.createMemory(memory);
if (memories.isEmpty()) {
ChatMemoryDO memoryDO = getMemoryDO(memory);
chatMemoryRepository.createMemory(memoryDO);
}
}
@Override
public void updateMemory(ChatMemoryUpdateReq chatMemoryUpdateReq, User user) {
ChatMemoryDO chatMemoryDO = chatMemoryRepository.getMemory(chatMemoryUpdateReq.getId());
boolean hadEnabled = MemoryStatus.ENABLED.equals(chatMemoryDO.getStatus());
boolean hadEnabled =
MemoryStatus.ENABLED.toString().equals(chatMemoryDO.getStatus().trim());
chatMemoryDO.setUpdatedBy(user.getName());
chatMemoryDO.setUpdatedAt(new Date());
BeanMapper.mapper(chatMemoryUpdateReq, chatMemoryDO);
@@ -58,12 +65,12 @@ public class MemoryServiceImpl implements MemoryService {
} else if (MemoryStatus.DISABLED.equals(chatMemoryUpdateReq.getStatus()) && hadEnabled) {
disableMemory(chatMemoryDO);
}
updateMemory(chatMemoryDO);
chatMemoryRepository.updateMemory(chatMemoryDO);
}
@Override
public void updateMemory(ChatMemoryDO memory) {
chatMemoryRepository.updateMemory(memory);
public void updateMemory(ChatMemory memory) {
chatMemoryRepository.updateMemory(getMemoryDO(memory));
}
@Override
@@ -72,7 +79,7 @@ public class MemoryServiceImpl implements MemoryService {
}
@Override
public PageInfo<ChatMemoryDO> pageMemories(PageMemoryReq pageMemoryReq) {
public PageInfo<ChatMemory> pageMemories(PageMemoryReq pageMemoryReq) {
ChatMemoryFilter chatMemoryFilter = pageMemoryReq.getChatMemoryFilter();
chatMemoryFilter.setSort(pageMemoryReq.getSort());
chatMemoryFilter.setOrderCondition(pageMemoryReq.getOrderCondition());
@@ -81,7 +88,7 @@ public class MemoryServiceImpl implements MemoryService {
}
@Override
public List<ChatMemoryDO> getMemories(ChatMemoryFilter chatMemoryFilter) {
public List<ChatMemory> getMemories(ChatMemoryFilter chatMemoryFilter) {
QueryWrapper<ChatMemoryDO> queryWrapper = new QueryWrapper<>();
if (chatMemoryFilter.getAgentId() != null) {
queryWrapper.lambda().eq(ChatMemoryDO::getAgentId, chatMemoryFilter.getAgentId());
@@ -109,32 +116,52 @@ public class MemoryServiceImpl implements MemoryService {
queryWrapper.orderBy(true, chatMemoryFilter.isAsc(),
chatMemoryFilter.getOrderCondition());
}
return chatMemoryRepository.getMemories(queryWrapper);
List<ChatMemoryDO> chatMemoryDOS = chatMemoryRepository.getMemories(queryWrapper);
return chatMemoryDOS.stream().map(this::getMemory).collect(Collectors.toList());
}
@Override
public List<ChatMemoryDO> getMemoriesForLlmReview() {
QueryWrapper<ChatMemoryDO> queryWrapper = new QueryWrapper<>();
queryWrapper.lambda().eq(ChatMemoryDO::getStatus, MemoryStatus.PENDING)
.isNull(ChatMemoryDO::getLlmReviewRet);
return chatMemoryRepository.getMemories(queryWrapper);
}
@Override
public void enableMemory(ChatMemoryDO memory) {
memory.setStatus(MemoryStatus.ENABLED);
private void enableMemory(ChatMemoryDO memory) {
memory.setStatus(MemoryStatus.ENABLED.toString());
exemplarService.storeExemplar(embeddingConfig.getMemoryCollectionName(memory.getAgentId()),
Text2SQLExemplar.builder().question(memory.getQuestion())
.sideInfo(memory.getSideInfo()).dbSchema(memory.getDbSchema())
.sql(memory.getS2sql()).build());
}
@Override
public void disableMemory(ChatMemoryDO memory) {
memory.setStatus(MemoryStatus.DISABLED);
private void disableMemory(ChatMemoryDO memory) {
memory.setStatus(MemoryStatus.DISABLED.toString());
exemplarService.removeExemplar(embeddingConfig.getMemoryCollectionName(memory.getAgentId()),
Text2SQLExemplar.builder().question(memory.getQuestion())
.sideInfo(memory.getSideInfo()).dbSchema(memory.getDbSchema())
.sql(memory.getS2sql()).build());
}
private ChatMemoryDO getMemoryDO(ChatMemory memory) {
ChatMemoryDO memoryDO = new ChatMemoryDO();
BeanUtils.copyProperties(memory, memoryDO);
memoryDO.setStatus(memory.getStatus().toString().trim());
if (Objects.nonNull(memory.getHumanReviewRet())) {
memoryDO.setHumanReviewRet(memory.getHumanReviewRet().toString().trim());
}
if (Objects.nonNull(memory.getLlmReviewRet())) {
memoryDO.setLlmReviewRet(memory.getLlmReviewRet().toString().trim());
}
return memoryDO;
}
private ChatMemory getMemory(ChatMemoryDO memoryDO) {
ChatMemory memory = new ChatMemory();
BeanUtils.copyProperties(memoryDO, memory);
memory.setStatus(MemoryStatus.valueOf(memoryDO.getStatus().trim()));
if (Objects.nonNull(memoryDO.getHumanReviewRet())) {
memory.setHumanReviewRet(
MemoryReviewResult.valueOf(memoryDO.getHumanReviewRet().trim()));
}
if (Objects.nonNull(memoryDO.getLlmReviewRet())) {
memory.setLlmReviewRet(MemoryReviewResult.valueOf(memoryDO.getLlmReviewRet().trim()));
}
return memory;
}
}

View File

@@ -19,19 +19,6 @@ public class Term {
this.nature = nature;
}
public Term(String word, Nature nature, int offset) {
this.word = word;
this.nature = nature;
this.offset = offset;
}
public Term(String word, Nature nature, int offset, int frequency) {
this.word = word;
this.nature = nature;
this.offset = offset;
this.frequency = frequency;
}
public int length() {
return this.word.length();
}

View File

@@ -38,14 +38,14 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
new Parameter("s2.embedding.store.timeout", "60", "超时时间(秒)", "", "number", MODULE_NAME);
public static final Parameter EMBEDDING_STORE_DIMENSION =
new Parameter("s2.embedding.store.dimension", "", "", "", "number", MODULE_NAME, null,
getDimensionDependency());
new Parameter("s2.embedding.store.dimension", "", "向量维", "", "number", MODULE_NAME,
null, getDimensionDependency());
public static final Parameter EMBEDDING_STORE_DATABASE_NAME =
new Parameter("s2.embedding.store.databaseName", "", "DatabaseName", "", "string",
MODULE_NAME, null, getDatabaseNameDependency());
public static final Parameter EMBEDDING_STORE_POST = new Parameter("s2.embedding.store.post",
"", "端口", "", "number", MODULE_NAME, null, getPostDependency());
public static final Parameter EMBEDDING_STORE_POST = new Parameter("s2.embedding.store.port",
"", "端口", "", "number", MODULE_NAME, null, getPortDependency());
public static final Parameter EMBEDDING_STORE_USER = new Parameter("s2.embedding.store.user",
"", "用户名", "", "string", MODULE_NAME, null, getUserDependency());
@@ -101,10 +101,8 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
private static List<Parameter.Dependency> getApiKeyDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), DEMO,
EmbeddingStoreType.PGVECTOR.name(), DEMO));
Lists.newArrayList(EmbeddingStoreType.MILVUS.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), DEMO));
}
private static List<Parameter.Dependency> getPathDependency() {
@@ -118,7 +116,7 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "384",
EmbeddingStoreType.PGVECTOR.name(), "768"));
EmbeddingStoreType.PGVECTOR.name(), "512"));
}
private static List<Parameter.Dependency> getDatabaseNameDependency() {
@@ -129,7 +127,7 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
EmbeddingStoreType.PGVECTOR.name(), "postgres"));
}
private static List<Parameter.Dependency> getPostDependency() {
private static List<Parameter.Dependency> getPortDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.PGVECTOR.name()),
ImmutableMap.of(EmbeddingStoreType.PGVECTOR.name(), "54333"));
@@ -140,12 +138,14 @@ public class EmbeddingStoreParameterConfig extends ParameterConfig {
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus",
EmbeddingStoreType.PGVECTOR.name(), "pgvector"));
EmbeddingStoreType.PGVECTOR.name(), "postgres"));
}
private static List<Parameter.Dependency> getPasswordDependency() {
return getDependency(EMBEDDING_STORE_PROVIDER.getName(),
Lists.newArrayList(EmbeddingStoreType.MILVUS.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus"));
Lists.newArrayList(EmbeddingStoreType.MILVUS.name(),
EmbeddingStoreType.PGVECTOR.name()),
ImmutableMap.of(EmbeddingStoreType.MILVUS.name(), "milvus",
EmbeddingStoreType.PGVECTOR.name(), "postgres"));
}
}

View File

@@ -8,7 +8,6 @@ import lombok.Data;
@Builder
public class DataItem {
/** * This field uses an underscore (_) at the end. */
private String id;
private String bizName;
@@ -19,9 +18,10 @@ public class DataItem {
private TypeEnums type;
/** * This field uses an underscore (_) at the end. */
private String modelId;
private String domainId;
private String defaultAgg;
public String getNewName() {

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.common.pojo;
import com.google.common.collect.Lists;
import lombok.Data;
import java.util.List;
@@ -18,5 +19,5 @@ public class ModelRela extends RecordInfo {
// left join, inner join, right join, outer join
private String joinType;
private List<JoinCondition> joinConditions;
private List<JoinCondition> joinConditions = Lists.newArrayList();
}

View File

@@ -1,5 +1,5 @@
package com.tencent.supersonic.common.pojo.enums;
public enum TypeEnums {
METRIC, DIMENSION, TAG_OBJECT, TAG, DOMAIN, DATASET, MODEL, UNKNOWN
METRIC, DIMENSION, TAG, DOMAIN, DATASET, MODEL, UNKNOWN
}

View File

@@ -26,7 +26,7 @@ public class PgvectorEmbeddingStoreFactory extends BaseEmbeddingStoreFactory {
embeddingStore.setPort(storeConfig.getPost());
embeddingStore.setDatabase(storeConfig.getDatabaseName());
embeddingStore.setUser(storeConfig.getUser());
embeddingStore.setPassword(storeConfig.getApiKey());
embeddingStore.setPassword(storeConfig.getPassword());
return embeddingStore;
}

View File

@@ -1,6 +1,7 @@
package dev.langchain4j.store.embedding;
import com.alibaba.fastjson.JSONObject;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DataItem;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.data.segment.TextSegment;
@@ -17,10 +18,18 @@ public class TextSegmentConvert {
public static final String QUERY_ID = "queryId";
public static List<TextSegment> convertToEmbedding(List<DataItem> dataItems) {
return dataItems.stream().map(dataItem -> {
Map meta = JSONObject.parseObject(JSONObject.toJSONString(dataItem), Map.class);
TextSegment textSegment = TextSegment.from(dataItem.getName(), new Metadata(meta));
addQueryId(textSegment, dataItem.getId() + dataItem.getType().name().toLowerCase());
return dataItems.stream().map(item -> {
// suffix with underscore to avoid embedding issue
DataItem newItem = DataItem.builder().domainId(item.getDomainId())
.bizName(item.getBizName()).type(item.getType()).newName(item.getNewName())
.defaultAgg(item.getDefaultAgg()).name(item.getName())
.id(item.getId() + Constants.UNDERLINE)
.modelId(item.getModelId() + Constants.UNDERLINE)
.domainId(item.getDomainId() + Constants.UNDERLINE).build();
Map meta = JSONObject.parseObject(JSONObject.toJSONString(newItem), Map.class);
TextSegment textSegment = TextSegment.from(newItem.getName(), new Metadata(meta));
addQueryId(textSegment, newItem.getId() + newItem.getType().name().toLowerCase());
return textSegment;
}).collect(Collectors.toList());
}

View File

@@ -7,14 +7,13 @@ WORKDIR /usr/src/app
# Argument to pass in the supersonic version at build time
ARG SUPERSONIC_VERSION
# Install necessary packages, including MySQL client
RUN apt-get update && \
apt-get install -y default-mysql-client unzip && \
rm -rf /var/lib/apt/lists/*
RUN apt-get update
# Install necessary packages, including Postgres client
RUN apt-get update && apt-get install -y postgresql-client
# Install the vim editor.
RUN apt-get update && \
apt-get install -y vim && \
RUN apt-get update && apt-get install -y vim && \
rm -rf /var/lib/apt/lists/*
# Update the package list and install iputils-ping.
@@ -40,4 +39,4 @@ WORKDIR /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION}
EXPOSE 9080
# Command to run the supersonic daemon
RUN chmod +x bin/supersonic-daemon.sh
CMD ["bash", "-c", "bin/supersonic-daemon.sh restart standalone prd && tail -f /dev/null"]
CMD ["bash", "-c", "bin/supersonic-daemon.sh restart standalone docker && tail -f /dev/null"]

32
docker/DockerfileS2 Normal file
View File

@@ -0,0 +1,32 @@
# Use an official OpenJDK runtime as a parent image
FROM supersonicbi/supersonic:0.9.8
# Set the working directory in the container
WORKDIR /usr/src/app
# Argument to pass in the supersonic version at build time
ARG SUPERSONIC_VERSION
# Install necessary packages, including Postgres client
RUN apt-get install -y postgresql-client
RUN rm /usr/src/app/supersonic-standalone-latest
# Copy the supersonic standalone zip file into the container
COPY assembly/build/supersonic-standalone-${SUPERSONIC_VERSION}.zip .
# Unzip the supersonic standalone zip
RUN unzip supersonic-standalone-${SUPERSONIC_VERSION}.zip && \
rm supersonic-standalone-${SUPERSONIC_VERSION}.zip
# Create a symbolic link to the supersonic installation directory
RUN ln -s /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION} /usr/src/app/supersonic-standalone-latest
# Set the working directory to the supersonic installation directory
WORKDIR /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION}
# Expose the default port
EXPOSE 9080
# Command to run the supersonic daemon
RUN chmod +x bin/supersonic-daemon.sh
CMD ["bash", "-c", "bin/supersonic-daemon.sh restart standalone docker && tail -f /dev/null"]

View File

@@ -0,0 +1,3 @@
#!/usr/bin/env bash
SUPERSONIC_VERSION=0.9.10-SNAPSHOT docker-compose -f docker-compose.yml -p supersonic up

View File

@@ -1,51 +1,28 @@
services:
chroma:
image: chromadb/chroma:0.5.3
postgres:
image: pgvector/pgvector:pg17
privileged: true
container_name: supersonic_chroma
ports:
- "8000:8000"
volumes:
- chroma_data:/chroma
networks:
- supersonic_network
dns:
- 114.114.114.114
- 8.8.8.8
- 8.8.4.4
healthcheck:
test: ["CMD", "curl", "http://0.0.0.0:8000"]
interval: 10s
timeout: 5s
retries: 10
mysql:
image: mysql:8.0
privileged: true
container_name: supersonic_mysql
container_name: supersonic_postgres
environment:
LANG: 'C.UTF-8' # 设置环境变量
MYSQL_ROOT_PASSWORD: root_password
MYSQL_DATABASE: supersonic_db
MYSQL_USER: supersonic_user
MYSQL_PASSWORD: supersonic_password
POSTGRES_ROOT_PASSWORD: root_password
POSTGRES_DATABASE: postgres
POSTGRES_USER: supersonic_user
POSTGRES_PASSWORD: supersonic_password
ports:
- "13306:3306"
- "15432:5432"
volumes:
- mysql_data:/var/lib/mysql
- postgres_data:/var/lib/postgresql
networks:
- supersonic_network
dns:
- 114.114.114.114
- 8.8.8.8
- 8.8.4.4
depends_on:
chroma:
condition: service_healthy
healthcheck:
test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
interval: 10s
timeout: 5s
test: ["CMD-SHELL", "sh -c 'pg_isready -U supersonic_user -d postgres'"]
interval: 30s
timeout: 10s
retries: 5
db_init:
@@ -53,22 +30,21 @@ services:
privileged: true
container_name: supersonic_db_init
depends_on:
mysql:
postgres:
condition: service_healthy
networks:
- supersonic_network
command: >
sh -c "
sleep 15 &&
if ! mysql -h supersonic_mysql -usupersonic_user -psupersonic_password -e 'use supersonic_db; show tables;' | grep -q 's2_database'; then
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-mysql.sql &&
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-mysql-demo.sql &&
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-mysql.sql &&
mysql -h supersonic_mysql -usupersonic_user -psupersonic_password supersonic_db < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-mysql-demo.sql
else
echo 'Database already initialized.'
fi
"
if ! PGPASSWORD=supersonic_password psql -h supersonic_postgres -U supersonic_user -d postgres -c 'select * from s2_database limit 1' > /dev/null;
then
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-postgres.sql
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/schema-postgres-demo.sql
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-postgres.sql
PGPASSWORD=supersonic_password psql -hsupersonic_postgres -U supersonic_user -d postgres < /usr/src/app/supersonic-standalone-${SUPERSONIC_VERSION:-latest}/conf/db/data-postgres-demo.sql
else
echo 'Database already initialized.'
fi"
dns:
- 114.114.114.114
- 8.8.8.8
@@ -79,17 +55,14 @@ services:
privileged: true
container_name: supersonic_standalone
environment:
DB_HOST: supersonic_mysql
DB_NAME: supersonic_db
DB_HOST: supersonic_postgres
DB_NAME: postgres
DB_USERNAME: supersonic_user
DB_PASSWORD: supersonic_password
CHROMA_HOST: supersonic_chroma
ports:
- "9080:9080"
depends_on:
chroma:
condition: service_healthy
mysql:
postgres:
condition: service_healthy
db_init:
condition: service_completed_successfully
@@ -112,8 +85,7 @@ services:
# propagation: rprivate
# create_host_path: true
volumes:
mysql_data:
chroma_data:
postgres_data:
supersonic_data:
networks:

View File

@@ -15,7 +15,7 @@ public class ColumnSchema {
private FieldType filedType;
private AggOperatorEnum agg;
private AggOperatorEnum agg = AggOperatorEnum.SUM;
private String name;

View File

@@ -9,7 +9,7 @@ import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Dim {
public class Dimension {
private String name;
@@ -27,16 +27,14 @@ public class Dim {
private String description;
private int isTag;
public Dim(String name, String bizName, DimensionType type, Integer isCreateDimension) {
public Dimension(String name, String bizName, DimensionType type, Integer isCreateDimension) {
this.name = name;
this.type = type;
this.isCreateDimension = isCreateDimension;
this.bizName = bizName;
}
public Dim(String name, String bizName, DimensionType type, Integer isCreateDimension,
public Dimension(String name, String bizName, DimensionType type, Integer isCreateDimension,
String expr, String dateFormat, DimensionTimeTypeParams typeParams) {
this.name = name;
this.type = type;
@@ -47,8 +45,8 @@ public class Dim {
this.bizName = bizName;
}
public static Dim getDefault() {
return new Dim("数据日期", "imp_date", DimensionType.partition_time, 0, "imp_date",
public static Dimension getDefault() {
return new Dimension("数据日期", "imp_date", DimensionType.partition_time, 0, "imp_date",
Constants.DAY_FORMAT, new DimensionTimeTypeParams("false", "day"));
}

View File

@@ -24,7 +24,7 @@ public class ModelDetail {
private List<Identify> identifiers = Lists.newArrayList();
private List<Dim> dimensions = Lists.newArrayList();
private List<Dimension> dimensions = Lists.newArrayList();
private List<Measure> measures = Lists.newArrayList();
@@ -39,7 +39,7 @@ public class ModelDetail {
return sqlQuery;
}
public List<Dim> filterTimeDims() {
public List<Dimension> filterTimeDims() {
if (CollectionUtils.isEmpty(dimensions)) {
return Lists.newArrayList();
}

View File

@@ -1,5 +1,5 @@
package com.tencent.supersonic.headless.api.pojo.enums;
public enum FieldType {
primary_key, foreign_key, partition_time, time, dimension, measure;
primary_key, foreign_key, partition_time, time, categorical, measure;
}

View File

@@ -32,8 +32,6 @@ public class DimensionReq extends SchemaItem {
private DataTypeEnums dataType;
private int isTag;
private Map<String, Object> ext;
private DimensionTimeTypeParams typeParams;

View File

@@ -9,6 +9,10 @@ import java.util.List;
@Data
public class ModelBuildReq {
private String name;
private String bizName;
private Long databaseId;
private Long domainId;

View File

@@ -211,7 +211,9 @@ public class QueryStructReq extends SemanticQueryReq {
SelectItem selectExpressionItem = new SelectItem(function);
String alias =
StringUtils.isNotBlank(aggregator.getAlias()) ? aggregator.getAlias() : columnName;
selectExpressionItem.setAlias(new Alias(alias));
if (!alias.equals(columnName)) {
selectExpressionItem.setAlias(new Alias(alias));
}
return selectExpressionItem;
}

View File

@@ -18,6 +18,8 @@ public class DimensionResp extends SchemaItem {
private Long modelId;
private Long domainId;
private DimensionType type;
private String expr;

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.Field;
import com.tencent.supersonic.headless.api.pojo.Identify;
@@ -62,7 +62,7 @@ public class ModelResp extends SchemaItem {
return isOpen != null && isOpen == 1;
}
public List<Dim> getTimeDimension() {
public List<Dimension> getTimeDimension() {
if (modelDetail == null) {
return Lists.newArrayList();
}

View File

@@ -19,6 +19,7 @@ public class DictWord {
private String word;
private String nature;
private String natureWithFrequency;
private String alias;
@Override
public boolean equals(Object o) {

View File

@@ -1,11 +1,14 @@
package com.tencent.supersonic.headless.chat.knowledge;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.tencent.supersonic.common.pojo.enums.DictWordType;
import com.tencent.supersonic.headless.api.pojo.response.S2Term;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -14,6 +17,31 @@ import java.util.stream.Collectors;
@Service
@Slf4j
public class KnowledgeBaseService {
private static volatile Map<Long, List<DictWord>> dimValueAliasMap = new HashMap<>();
public static Map<Long, List<DictWord>> getDimValueAlias() {
return dimValueAliasMap;
}
public static List<DictWord> addDimValueAlias(Long dimId, List<DictWord> newWords) {
List<DictWord> dimValueAlias =
dimValueAliasMap.containsKey(dimId) ? dimValueAliasMap.get(dimId)
: new ArrayList<>();
Set<String> wordSet =
dimValueAlias
.stream().map(word -> String.format("%s_%s_%s",
word.getNatureWithFrequency(), word.getWord(), word.getAlias()))
.collect(Collectors.toSet());
for (DictWord dictWord : newWords) {
String key = String.format("%s_%s_%s", dictWord.getNatureWithFrequency(),
dictWord.getWord(), dictWord.getAlias());
if (!wordSet.contains(key)) {
dimValueAlias.add(dictWord);
}
}
dimValueAliasMap.put(dimId, dimValueAlias);
return dimValueAlias;
}
public void updateSemanticKnowledge(List<DictWord> natures) {
@@ -41,6 +69,11 @@ public class KnowledgeBaseService {
}
// 2. update online knowledge
if (CollectionUtils.isNotEmpty(dimValueAliasMap)) {
for (Long dimId : dimValueAliasMap.keySet()) {
natures.addAll(dimValueAliasMap.get(dimId));
}
}
updateOnlineKnowledge(natures);
}

View File

@@ -12,6 +12,8 @@ import com.hankcs.hanlp.dictionary.other.CharTable;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.utility.LexiconUtility;
import com.hankcs.hanlp.utility.TextUtility;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.DictWordType;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import java.io.BufferedOutputStream;
@@ -103,7 +105,22 @@ public class MultiCustomDictionary extends DynamicCustomDictionary {
String word = getWordBySpace(param[0]);
if (isLetters) {
original = word;
word = word.toLowerCase();
// word = word.toLowerCase();
// 加入小写别名
if (!original.equals(word.toLowerCase())) {
DictWord dictWord = new DictWord();
String nature = param[1];
dictWord.setNatureWithFrequency(
String.format("%s " + Constants.DEFAULT_FREQUENCY, nature));
dictWord.setWord(word);
dictWord.setAlias(word.toLowerCase());
String[] split = nature.split(DictWordType.NATURE_SPILT);
if (split.length >= 2) {
Long dimId = Long.parseLong(
nature.split(DictWordType.NATURE_SPILT)[split.length - 1]);
KnowledgeBaseService.addDimValueAlias(dimId, Arrays.asList(dictWord));
}
}
}
if (natureCount == 0) {
attribute = new CoreDictionary.Attribute(defaultNature);

View File

@@ -8,12 +8,15 @@ import com.tencent.supersonic.headless.api.pojo.SchemaMapInfo;
import com.tencent.supersonic.headless.api.pojo.response.S2Term;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.knowledge.DatabaseMapResult;
import com.tencent.supersonic.headless.chat.knowledge.DictWord;
import com.tencent.supersonic.headless.chat.knowledge.HanlpMapResult;
import com.tencent.supersonic.headless.chat.knowledge.KnowledgeBaseService;
import com.tencent.supersonic.headless.chat.knowledge.builder.BaseWordBuilder;
import com.tencent.supersonic.headless.chat.knowledge.helper.HanlpHelper;
import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import com.tencent.supersonic.headless.chat.utils.EditDistanceUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;
import java.util.HashSet;
@@ -83,12 +86,32 @@ public class KeywordMapper extends BaseMapper {
.element(element).frequency(frequency).word(hanlpMapResult.getName())
.similarity(hanlpMapResult.getSimilarity())
.detectWord(hanlpMapResult.getDetectWord()).build();
// doDimValueAliasLogic 将维度值别名进行替换成真实维度值
doDimValueAliasLogic(schemaElementMatch);
addToSchemaMap(chatQueryContext.getMapInfo(), dataSetId, schemaElementMatch);
}
}
}
private void doDimValueAliasLogic(SchemaElementMatch schemaElementMatch) {
SchemaElement element = schemaElementMatch.getElement();
if (SchemaElementType.VALUE.equals(element.getType())) {
Long dimId = element.getId();
String word = schemaElementMatch.getWord();
Map<Long, List<DictWord>> dimValueAlias = KnowledgeBaseService.getDimValueAlias();
if (Objects.nonNull(dimId) && StringUtils.isNotEmpty(word)
&& dimValueAlias.containsKey(dimId)) {
Map<String, DictWord> aliasAndDictMap = dimValueAlias.get(dimId).stream()
.collect(Collectors.toMap(dictWord -> dictWord.getAlias(),
dictWord -> dictWord, (v1, v2) -> v2));
if (aliasAndDictMap.containsKey(word)) {
String wordTech = aliasAndDictMap.get(word).getWord();
schemaElementMatch.setWord(wordTech);
}
}
}
}
private void convertMapResultToMapInfo(ChatQueryContext chatQueryContext,
List<DatabaseMapResult> mapResults) {
for (DatabaseMapResult match : mapResults) {

View File

@@ -0,0 +1,37 @@
package com.tencent.supersonic.headless.chat.mapper;
import com.tencent.supersonic.common.pojo.enums.Text2SQLType;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Slf4j
public class TimeFieldMapper extends BaseMapper {
@Override
public void doMap(ChatQueryContext chatQueryContext) {
if (chatQueryContext.getRequest().getText2SQLType().equals(Text2SQLType.ONLY_RULE)) {
return;
}
Map<Long, DataSetSchema> schemaMap =
chatQueryContext.getSemanticSchema().getDataSetSchemaMap();
for (Map.Entry<Long, DataSetSchema> entry : schemaMap.entrySet()) {
List<SchemaElement> timeDims = entry.getValue().getDimensions().stream()
.filter(dim -> dim.getTimeFormat() != null).collect(Collectors.toList());
for (SchemaElement schemaElement : timeDims) {
chatQueryContext.getMapInfo().getMatchedElements(entry.getKey())
.add(SchemaElementMatch.builder().word(schemaElement.getName())
.element(schemaElement).detectWord(schemaElement.getName())
.similarity(1.0).build());
}
}
}
}

View File

@@ -84,7 +84,7 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
return connection.getMetaData();
}
protected static FieldType classifyColumnType(String typeName) {
public FieldType classifyColumnType(String typeName) {
switch (typeName.toUpperCase()) {
case "INT":
case "INTEGER":
@@ -101,7 +101,7 @@ public abstract class BaseDbAdaptor implements DbAdaptor {
case "TIMESTAMP":
return FieldType.time;
default:
return FieldType.dimension;
return FieldType.categorical;
}
}

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.adaptor.db;
import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
import java.sql.SQLException;
@@ -19,4 +20,6 @@ public interface DbAdaptor {
List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
throws SQLException;
FieldType classifyColumnType(String typeName);
}

View File

@@ -114,7 +114,8 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
return dbColumns;
}
protected static FieldType classifyColumnType(String typeName) {
@Override
public FieldType classifyColumnType(String typeName) {
switch (typeName.toUpperCase()) {
case "INT":
case "INTEGER":
@@ -141,7 +142,7 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
case "CHARACTER":
case "UUID":
default:
return FieldType.dimension;
return FieldType.categorical;
}
}

View File

@@ -61,13 +61,18 @@ public class SqlQueryConverter implements QueryConverter {
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
AggOption aggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
ontologyQueryParam.getMetrics().addAll(metrics);
ontologyQueryParam.getDimensions().addAll(dimensions);
ontologyQueryParam.setAggOption(aggOption);
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(aggOption));
AggOption sqlQueryAggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
// if sql query itself has aggregation, ontology query just returns detail
if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) {
ontologyQueryParam.setAggOption(AggOption.NATIVE);
} else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.DEFAULT);
}
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(ontologyQueryParam.getAggOption()));
queryStatement.setOntologyQueryParam(ontologyQueryParam);
generateDerivedMetric(sqlGenerateUtils, queryStatement);

View File

@@ -57,7 +57,11 @@ public class StructQueryConverter implements QueryConverter {
.map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQueryParam, null);
ontologyQueryParam.setWhere(where);
ontologyQueryParam.setAggOption(AggOption.AGGREGATION);
if (ontologyQueryParam.getMetrics().isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.NATIVE);
} else {
ontologyQueryParam.setAggOption(AggOption.DEFAULT);
}
ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery());
ontologyQueryParam.setOrder(structQueryParam.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))

View File

@@ -26,8 +26,8 @@ public class DataModelNode extends SemanticNode {
sqlTable = dataModel.getSqlQuery();
} else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) {
if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
String fullTableName = Arrays.stream(dataModel.getTableQuery().split("\\."))
.collect(Collectors.joining(".public."));
String fullTableName =
String.join(".public.", dataModel.getTableQuery().split("\\."));
sqlTable = "select * from " + fullTableName;
} else {
sqlTable = "select * from " + dataModel.getTableQuery();
@@ -64,7 +64,7 @@ public class DataModelNode extends SemanticNode {
for (Dimension d : datasource.getDimensions()) {
List<SqlNode> identifiers =
expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> dimensions.add(i.toString()));
identifiers.forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName());
}
for (Identify i : datasource.getIdentifiers()) {
@@ -73,7 +73,7 @@ public class DataModelNode extends SemanticNode {
for (Measure m : datasource.getMeasures()) {
List<SqlNode> identifiers =
expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> {
identifiers.forEach(i -> {
if (!dimensions.contains(i.toString())) {
metrics.add(i.toString());
}
@@ -127,7 +127,7 @@ public class DataModelNode extends SemanticNode {
}
public static String getNames(List<DataModel> dataModelList) {
return dataModelList.stream().map(d -> d.getName()).collect(Collectors.joining("_"));
return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_"));
}
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam,
@@ -138,12 +138,12 @@ public class DataModelNode extends SemanticNode {
: d)
.collect(Collectors.toSet()));
Set<String> schemaMetricName =
ontology.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName()))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream()
.forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName())));
queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
.forEach(m -> queryMeasures.add(m));
.forEach(queryMeasures::add);
}
public static void mergeQueryFilterDimensionMeasure(Ontology ontology,
@@ -155,13 +155,13 @@ public class DataModelNode extends SemanticNode {
FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType),
filterConditions);
Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName = ontology.getMetrics().stream().map(m -> m.getName())
.collect(Collectors.toSet());
Set<String> schemaMetricName =
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
for (String filterCondition : filterConditions) {
if (schemaMetricName.contains(filterCondition)) {
ontology.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream()
.forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName())));
continue;
}
@@ -196,8 +196,8 @@ public class DataModelNode extends SemanticNode {
}
// second, traverse the ontology to find other related dataModels
List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, baseDataModel,
queryDimensions, queryMeasures);
List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, queryParam,
baseDataModel, queryDimensions, queryMeasures);
if (CollectionUtils.isEmpty(relatedDataModels)) {
relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel,
queryDimensions, queryMeasures);
@@ -255,7 +255,7 @@ public class DataModelNode extends SemanticNode {
.collect(Collectors.toSet());
Set<String> baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName)
.collect(Collectors.toSet());
baseDataModel.getIdentifiers().stream().forEach(i -> baseDimensions.add(i.getName()));
baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName()));
baseMeasures.retainAll(queryMeasures);
if (baseMeasures.size() < queryMeasures.size()) {
@@ -282,7 +282,8 @@ public class DataModelNode extends SemanticNode {
}
private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
DataModel baseDataModel, Set<String> queryDimensions, Set<String> queryMeasures) {
OntologyQueryParam queryParam, DataModel baseDataModel, Set<String> queryDimensions,
Set<String> queryMeasures) {
Set<String> joinDataModelNames = new HashSet<>();
List<DataModel> joinDataModels = new ArrayList<>();
Set<String> before = new HashSet<>();
@@ -295,7 +296,7 @@ public class DataModelNode extends SemanticNode {
visitJoinRelations, sortedJoinRelation);
ontology.getJoinRelations().stream()
.filter(j -> !visitJoinRelations.contains(j.getId()))
.forEach(j -> sortedJoinRelation.add(j));
.forEach(sortedJoinRelation::add);
for (JoinRelation joinRelation : sortedJoinRelation) {
if (!before.contains(joinRelation.getLeft())
&& !before.contains(joinRelation.getRight())) {
@@ -305,13 +306,17 @@ public class DataModelNode extends SemanticNode {
boolean isRight = before.contains(joinRelation.getLeft());
DataModel other = isRight ? ontology.getDataModelMap().get(joinRelation.getRight())
: ontology.getDataModelMap().get(joinRelation.getLeft());
String joinDimName = isRight ? joinRelation.getJoinCondition().get(0).getRight()
: joinRelation.getJoinCondition().get(0).getLeft();
if (!queryDimensions.isEmpty()) {
Set<String> linkDimension = other.getDimensions().stream()
.map(dd -> dd.getName()).collect(Collectors.toSet());
other.getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName()));
.map(Dimension::getName).collect(Collectors.toSet());
other.getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) {
isMatch = true;
// joinDim should be added to the query dimension
queryParam.getDimensions().add(joinDimName);
}
}
Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName)
@@ -322,7 +327,7 @@ public class DataModelNode extends SemanticNode {
}
if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) {
Set<String> linkDimension = ontology.getDimensionMap().get(other.getName())
.stream().map(dd -> dd.getName()).collect(Collectors.toSet());
.stream().map(Dimension::getName).collect(Collectors.toSet());
linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) {
isMatch = true;
@@ -382,15 +387,14 @@ public class DataModelNode extends SemanticNode {
if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) {
continue;
}
Long identifierNum = entry.getValue().getIdentifiers().stream().map(i -> i.getName())
.filter(i -> baseIdentifiers.contains(i)).count();
long identifierNum = entry.getValue().getIdentifiers().stream().map(Identify::getName)
.filter(baseIdentifiers::contains).count();
if (identifierNum > 0) {
boolean isMatch = false;
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().getDimensions().stream()
.map(dd -> dd.getName()).collect(Collectors.toSet());
entry.getValue().getIdentifiers().stream()
.forEach(i -> linkDimension.add(i.getName()));
.map(Dimension::getName).collect(Collectors.toSet());
entry.getValue().getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
isMatch = true;
@@ -398,7 +402,7 @@ public class DataModelNode extends SemanticNode {
}
if (!measures.isEmpty()) {
Set<String> linkMeasure = entry.getValue().getMeasures().stream()
.map(mm -> mm.getName()).collect(Collectors.toSet());
.map(Measure::getName).collect(Collectors.toSet());
linkMeasure.retainAll(measures);
if (!linkMeasure.isEmpty()) {
isMatch = true;

View File

@@ -15,6 +15,6 @@ public class OntologyQueryParam {
private String where;
private Long limit;
private List<ColumnOrder> order;
private boolean nativeQuery = false;
private AggOption aggOption = AggOption.DEFAULT;
private boolean nativeQuery = true;
private AggOption aggOption = AggOption.NATIVE;
}

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
@@ -20,11 +20,11 @@ public class SysTimeDimensionBuilder {
Pattern.compile("\\b(DATE|TIME|TIMESTAMP|YEAR|MONTH|DAY|HOUR|MINUTE|SECOND)\\b",
Pattern.CASE_INSENSITIVE);
public static void addSysTimeDimension(List<Dim> dims, DbAdaptor engineAdaptor) {
public static void addSysTimeDimension(List<Dimension> dims, DbAdaptor engineAdaptor) {
log.debug("addSysTimeDimension before:{}, engineAdaptor:{}", dims, engineAdaptor);
Dim timeDim = getTimeDim(dims);
Dimension timeDim = getTimeDim(dims);
if (timeDim == null) {
timeDim = Dim.getDefault();
timeDim = Dimension.getDefault();
// todo not find the time dimension
return;
}
@@ -34,8 +34,8 @@ public class SysTimeDimensionBuilder {
log.debug("addSysTimeDimension after:{}, engineAdaptor:{}", dims, engineAdaptor);
}
private static Dim generateSysDayDimension(Dim timeDim, DbAdaptor engineAdaptor) {
Dim dim = new Dim();
private static Dimension generateSysDayDimension(Dimension timeDim, DbAdaptor engineAdaptor) {
Dimension dim = new Dimension();
dim.setBizName(TimeDimensionEnum.DAY.getName());
dim.setType(DimensionType.partition_time);
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.DAY.name().toLowerCase(),
@@ -47,8 +47,8 @@ public class SysTimeDimensionBuilder {
return dim;
}
private static Dim generateSysWeekDimension(Dim timeDim, DbAdaptor engineAdaptor) {
Dim dim = new Dim();
private static Dimension generateSysWeekDimension(Dimension timeDim, DbAdaptor engineAdaptor) {
Dimension dim = new Dimension();
dim.setBizName(TimeDimensionEnum.WEEK.getName());
dim.setType(DimensionType.partition_time);
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.WEEK.name().toLowerCase(),
@@ -60,8 +60,8 @@ public class SysTimeDimensionBuilder {
return dim;
}
private static Dim generateSysMonthDimension(Dim timeDim, DbAdaptor engineAdaptor) {
Dim dim = new Dim();
private static Dimension generateSysMonthDimension(Dimension timeDim, DbAdaptor engineAdaptor) {
Dimension dim = new Dimension();
dim.setBizName(TimeDimensionEnum.MONTH.getName());
dim.setType(DimensionType.partition_time);
dim.setExpr(generateTimeExpr(timeDim, TimeDimensionEnum.MONTH.name().toLowerCase(),
@@ -79,7 +79,8 @@ public class SysTimeDimensionBuilder {
}
// Check whether the time field contains keywords,Generation time expression
private static String generateTimeExpr(Dim timeDim, String dateType, DbAdaptor engineAdaptor) {
private static String generateTimeExpr(Dimension timeDim, String dateType,
DbAdaptor engineAdaptor) {
String bizName = timeDim.getBizName();
String dateFormat = timeDim.getDateFormat();
if (containsTimeKeyword(bizName)) {
@@ -90,8 +91,8 @@ public class SysTimeDimensionBuilder {
}
}
private static Dim getTimeDim(List<Dim> timeDims) {
for (Dim dim : timeDims) {
private static Dimension getTimeDim(List<Dimension> timeDims) {
for (Dimension dim : timeDims) {
if (dim.getType().equals(DimensionType.partition_time)) {
return dim;
}

View File

@@ -207,7 +207,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(),
modelResp.getName());
List<Dim> timeDims = modelResp.getTimeDimension();
List<Dimension> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql,
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(),

View File

@@ -27,7 +27,7 @@ public class SchemaDictUpdateListener implements ApplicationListener<DataEvent>
dictWord.setWord(dataItem.getName());
String sign = DictWordType.NATURE_SPILT;
String suffixNature = DictWordType.getSuffixNature(dataItem.getType());
String nature = sign + dataItem.getModelId() + dataItem.getId() + suffixNature;
String nature = sign + dataItem.getModelId() + sign + dataItem.getId() + suffixNature;
String natureWithFrequency = nature + " " + Constants.DEFAULT_FREQUENCY;
dictWord.setNature(nature);
dictWord.setNatureWithFrequency(natureWithFrequency);

View File

@@ -1,6 +1,6 @@
package com.tencent.supersonic.headless.server.manager;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.ModelDetail;
@@ -18,7 +18,6 @@ import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.Objects;
import java.util.stream.Collectors;
@@ -54,7 +53,7 @@ public class ModelYamlManager {
return dataModelYamlTpl;
}
public static DimensionYamlTpl convert(Dim dim) {
public static DimensionYamlTpl convert(Dimension dim) {
DimensionYamlTpl dimensionYamlTpl = new DimensionYamlTpl();
BeanUtils.copyProperties(dim, dimensionYamlTpl);
dimensionYamlTpl.setName(dim.getBizName());
@@ -85,15 +84,4 @@ public class ModelYamlManager {
return identifyYamlTpl;
}
/**
 * Appends an internal count metric to the model detail's measure list.
 *
 * <p>The measure is named {@code <datasourceEnName>_internal_cnt}, aggregated with
 * {@code count}, and counts over the first identifier's bizName when identifiers
 * exist, otherwise over the constant expression {@code "1"}.
 *
 * @param datasourceEnName  English name of the datasource, used as the bizName prefix
 * @param datasourceDetail  model detail whose measures list receives the new metric
 */
private static void addInterCntMetric(String datasourceEnName, ModelDetail datasourceDetail) {
    Measure internalCnt = new Measure();
    // Prefer counting the first identifier column; fall back to counting rows via "1".
    if (CollectionUtils.isEmpty(datasourceDetail.getIdentifiers())) {
        internalCnt.setExpr("1");
    } else {
        internalCnt.setExpr(datasourceDetail.getIdentifiers().get(0).getBizName());
    }
    internalCnt.setAgg("count");
    internalCnt.setBizName(String.format("%s_%s", datasourceEnName, "internal_cnt"));
    internalCnt.setIsCreateMetric(1);
    datasourceDetail.getMeasures().add(internalCnt);
}
}

View File

@@ -23,7 +23,7 @@ public class RuleSemanticModeller implements SemanticModeller {
private ColumnSchema convert(DBColumn dbColumn) {
ColumnSchema columnSchema = new ColumnSchema();
columnSchema.setName(dbColumn.getComment());
columnSchema.setName(dbColumn.getColumnName());
columnSchema.setColumnName(dbColumn.getColumnName());
columnSchema.setComment(dbColumn.getComment());
columnSchema.setDataType(dbColumn.getDataType());

View File

@@ -16,6 +16,7 @@ public class ModelDO {
private Long domainId;
@Deprecated
private Long tagObjectId;
private String name;

View File

@@ -28,6 +28,7 @@ public class QueryStatDO {
private String queryStructCmd;
@TableField("struct_cmd_md5")
private String queryStructCmdMd5;
@TableField("\"sql\"")
private String sql;
private String sqlMd5;
private String queryEngine;

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.server.persistence.mapper;
import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter;
import org.apache.ibatis.annotations.Mapper;
@@ -12,11 +11,7 @@ public interface DimensionDOCustomMapper {
void batchInsert(List<DimensionDO> dimensionDOS);
void batchUpdate(List<DimensionDO> dimensionDOS);
void batchUpdateStatus(List<DimensionDO> dimensionDOS);
List<DimensionDO> query(DimensionFilter dimensionFilter);
List<DimensionDO> queryDimensions(DimensionsFilter dimensionsFilter);
}

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.server.persistence.mapper;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO;
import com.tencent.supersonic.headless.server.pojo.MetricFilter;
import com.tencent.supersonic.headless.server.pojo.MetricsFilter;
import org.apache.ibatis.annotations.Mapper;
@@ -20,7 +19,5 @@ public interface MetricDOCustomMapper {
void updateClassificationsBatch(List<MetricDO> metricDOS);
List<MetricDO> query(MetricFilter metricFilter);
List<MetricDO> queryMetrics(MetricsFilter metricsFilter);
}

View File

@@ -1,15 +1,9 @@
package com.tencent.supersonic.headless.server.persistence.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.tencent.supersonic.headless.api.pojo.QueryStat;
import com.tencent.supersonic.headless.api.pojo.request.ItemUseReq;
import com.tencent.supersonic.headless.server.persistence.dataobject.QueryStatDO;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
@Mapper
public interface StatMapper extends BaseMapper<QueryStatDO> {
List<QueryStat> getStatInfo(ItemUseReq itemUseCommend);
}

View File

@@ -118,6 +118,7 @@ public class DictRepositoryImpl implements DictRepository {
wrapper.lambda().and(qw -> qw.like(DictTaskDO::getName, key).or()
.like(DictTaskDO::getDescription, key).or().like(DictTaskDO::getConfig, key));
}
wrapper.lambda().orderByDesc(DictTaskDO::getCreatedAt);
return dictTaskMapper.selectList(wrapper);
}

View File

@@ -1,14 +1,17 @@
package com.tencent.supersonic.headless.server.persistence.repository.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO;
import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOCustomMapper;
import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOMapper;
import com.tencent.supersonic.headless.server.persistence.repository.DimensionRepository;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Objects;
@Service
public class DimensionRepositoryImpl implements DimensionRepository {
@@ -50,7 +53,43 @@ public class DimensionRepositoryImpl implements DimensionRepository {
@Override
public List<DimensionDO> getDimension(DimensionFilter dimensionFilter) {
    // Build the criteria with QueryWrapper instead of hand-written SQL.
    QueryWrapper<DimensionDO> queryWrapper = new QueryWrapper<>();
    // Status 3 marks logically deleted dimensions; always exclude them.
    queryWrapper.lambda().ne(DimensionDO::getStatus, 3);
    if (Objects.nonNull(dimensionFilter.getIds()) && !dimensionFilter.getIds().isEmpty()) {
        queryWrapper.lambda().in(DimensionDO::getId, dimensionFilter.getIds());
    }
    if (StringUtils.isNotBlank(dimensionFilter.getId())) {
        queryWrapper.lambda().eq(DimensionDO::getId, dimensionFilter.getId());
    }
    if (Objects.nonNull(dimensionFilter.getModelIds())
            && !dimensionFilter.getModelIds().isEmpty()) {
        queryWrapper.lambda().in(DimensionDO::getModelId, dimensionFilter.getModelIds());
    }
    if (StringUtils.isNotBlank(dimensionFilter.getName())) {
        queryWrapper.lambda().like(DimensionDO::getName, dimensionFilter.getName());
    }
    // Fix: guard the bizName predicate on bizName itself (was mistakenly checking getId()).
    if (StringUtils.isNotBlank(dimensionFilter.getBizName())) {
        queryWrapper.lambda().like(DimensionDO::getBizName, dimensionFilter.getBizName());
    }
    if (Objects.nonNull(dimensionFilter.getStatus())) {
        queryWrapper.lambda().eq(DimensionDO::getStatus, dimensionFilter.getStatus());
    }
    if (Objects.nonNull(dimensionFilter.getSensitiveLevel())) {
        queryWrapper.lambda().eq(DimensionDO::getSensitiveLevel,
                dimensionFilter.getSensitiveLevel());
    }
    if (StringUtils.isNotBlank(dimensionFilter.getCreatedBy())) {
        queryWrapper.lambda().eq(DimensionDO::getCreatedBy, dimensionFilter.getCreatedBy());
    }
    if (StringUtils.isNotBlank(dimensionFilter.getKey())) {
        String key = dimensionFilter.getKey();
        // Fix: nest the fuzzy-match OR chain inside and(...) so the OR does not escape
        // the surrounding AND conditions (same pattern as DictRepositoryImpl).
        queryWrapper.lambda().and(qw -> qw.like(DimensionDO::getName, key).or()
                .like(DimensionDO::getBizName, key).or()
                .like(DimensionDO::getDescription, key).or()
                .like(DimensionDO::getAlias, key).or()
                .like(DimensionDO::getCreatedBy, key));
    }
    return dimensionDOMapper.selectList(queryWrapper);
}
@Override

View File

@@ -9,9 +9,11 @@ import com.tencent.supersonic.headless.server.persistence.mapper.MetricQueryDefa
import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository;
import com.tencent.supersonic.headless.server.pojo.MetricFilter;
import com.tencent.supersonic.headless.server.pojo.MetricsFilter;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Objects;
@Component
public class MetricRepositoryImpl implements MetricRepository {
@@ -73,7 +75,46 @@ public class MetricRepositoryImpl implements MetricRepository {
@Override
public List<MetricDO> getMetric(MetricFilter metricFilter) {
    // Build the criteria with QueryWrapper instead of hand-written SQL.
    QueryWrapper<MetricDO> queryWrapper = new QueryWrapper<>();
    // Status 3 marks logically deleted metrics; always exclude them.
    queryWrapper.lambda().ne(MetricDO::getStatus, 3);
    if (Objects.nonNull(metricFilter.getIds()) && !metricFilter.getIds().isEmpty()) {
        queryWrapper.lambda().in(MetricDO::getId, metricFilter.getIds());
    }
    if (StringUtils.isNotBlank(metricFilter.getId())) {
        queryWrapper.lambda().eq(MetricDO::getId, metricFilter.getId());
    }
    if (Objects.nonNull(metricFilter.getModelIds()) && !metricFilter.getModelIds().isEmpty()) {
        queryWrapper.lambda().in(MetricDO::getModelId, metricFilter.getModelIds());
    }
    if (StringUtils.isNotBlank(metricFilter.getType())) {
        queryWrapper.lambda().eq(MetricDO::getType, metricFilter.getType());
    }
    if (StringUtils.isNotBlank(metricFilter.getName())) {
        queryWrapper.lambda().like(MetricDO::getName, metricFilter.getName());
    }
    // Fix: guard the bizName predicate on bizName itself (was mistakenly checking getId()).
    if (StringUtils.isNotBlank(metricFilter.getBizName())) {
        queryWrapper.lambda().like(MetricDO::getBizName, metricFilter.getBizName());
    }
    if (Objects.nonNull(metricFilter.getStatus())) {
        queryWrapper.lambda().eq(MetricDO::getStatus, metricFilter.getStatus());
    }
    if (Objects.nonNull(metricFilter.getSensitiveLevel())) {
        queryWrapper.lambda().eq(MetricDO::getSensitiveLevel, metricFilter.getSensitiveLevel());
    }
    if (StringUtils.isNotBlank(metricFilter.getCreatedBy())) {
        queryWrapper.lambda().eq(MetricDO::getCreatedBy, metricFilter.getCreatedBy());
    }
    // Only constrain publish state when explicitly filtering for published metrics.
    if (Objects.nonNull(metricFilter.getIsPublish()) && metricFilter.getIsPublish() == 1) {
        queryWrapper.lambda().eq(MetricDO::getIsPublish, metricFilter.getIsPublish());
    }
    if (StringUtils.isNotBlank(metricFilter.getKey())) {
        String key = metricFilter.getKey();
        // Fix: nest the fuzzy-match OR chain inside and(...) so the OR does not escape
        // the surrounding AND conditions (same pattern as DictRepositoryImpl).
        queryWrapper.lambda().and(qw -> qw.like(MetricDO::getName, key).or()
                .like(MetricDO::getBizName, key).or()
                .like(MetricDO::getDescription, key).or()
                .like(MetricDO::getAlias, key).or()
                .like(MetricDO::getCreatedBy, key));
    }
    return metricDOMapper.selectList(queryWrapper);
}
@Override

View File

@@ -51,7 +51,7 @@ public class QueryRuleRepositoryImpl implements QueryRuleRepository {
QueryWrapper<QueryRuleDO> wrapperSys = new QueryWrapper<>();
// 返回系统设置的规则
wrapperSys.or().eq("priority", 0L);
wrapperSys.lambda().or().eq(QueryRuleDO::getPriority, 0L);
List<QueryRuleDO> queryRuleDOListSys = mapper.selectList(wrapperSys);
queryRuleDOList.addAll(queryRuleDOListSys);

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.headless.server.persistence.repository.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
@@ -47,7 +48,7 @@ public class StatRepositoryImpl implements StatRepository {
@SneakyThrows
public List<ItemUseResp> getStatInfo(ItemUseReq itemUseReq) {
List<ItemUseResp> result = new ArrayList<>();
List<QueryStat> statInfos = statMapper.getStatInfo(itemUseReq);
List<QueryStatDO> statInfos = getQueryStats(itemUseReq);
Map<String, Long> map = new ConcurrentHashMap<>();
statInfos.stream().forEach(stat -> {
String dimensions = stat.getDimensions();
@@ -70,6 +71,21 @@ public class StatRepositoryImpl implements StatRepository {
.collect(Collectors.toList());
}
/**
 * Loads raw query-stat rows for the given request via QueryWrapper.
 *
 * <p>All filters are optional: an exact model id, a model id list, and a fuzzy
 * match on the metrics column are applied only when present on the request.
 *
 * @param itemUseReq request carrying the optional filter values
 * @return matching QueryStatDO rows
 */
private List<QueryStatDO> getQueryStats(ItemUseReq itemUseReq) {
    QueryWrapper<QueryStatDO> wrapper = new QueryWrapper<>();
    if (itemUseReq.getModelId() != null) {
        wrapper.lambda().eq(QueryStatDO::getModelId, itemUseReq.getModelId());
    }
    if (itemUseReq.getModelIds() != null && !itemUseReq.getModelIds().isEmpty()) {
        wrapper.lambda().in(QueryStatDO::getModelId, itemUseReq.getModelIds());
    }
    if (itemUseReq.getMetric() != null) {
        wrapper.lambda().like(QueryStatDO::getMetrics, itemUseReq.getMetric());
    }
    return statMapper.selectList(wrapper);
}
private void updateStatMapInfo(Map<String, Long> map, String dimensions, String type,
Long dataSetId) {
if (StringUtils.isNotEmpty(dimensions)) {

View File

@@ -47,7 +47,7 @@ public interface DimensionService {
void sendDimensionEventBatch(List<Long> modelIds, EventType eventType);
DataEvent getDataEvent();
DataEvent getAllDataEvents();
Boolean updateDimValueAlias(DimValueAliasReq req, User user);
}

View File

@@ -79,7 +79,7 @@ public class DataSetServiceImpl extends ServiceImpl<DataSetDOMapper, DataSetDO>
DataSetDO dataSetDO = convert(dataSetReq);
dataSetDO.setStatus(StatusEnum.ONLINE.getCode());
DataSetResp dataSetResp = convert(dataSetDO);
conflictCheck(dataSetResp);
// conflictCheck(dataSetResp);
save(dataSetDO);
dataSetResp.setId(dataSetDO.getId());
return dataSetResp;
@@ -90,7 +90,7 @@ public class DataSetServiceImpl extends ServiceImpl<DataSetDOMapper, DataSetDO>
dataSetReq.updatedBy(user.getName());
DataSetDO dataSetDO = convert(dataSetReq);
DataSetResp dataSetResp = convert(dataSetDO);
conflictCheck(dataSetResp);
// conflictCheck(dataSetResp);
updateById(dataSetDO);
return dataSetResp;
}

View File

@@ -225,6 +225,9 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
if (StringUtils.isNotBlank(modelBuildReq.getSql())) {
List<DBColumn> columns =
getColumns(modelBuildReq.getDatabaseId(), modelBuildReq.getSql());
DatabaseResp databaseResp = getDatabase(modelBuildReq.getDatabaseId());
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType());
columns.forEach(c -> c.setFieldType(engineAdaptor.classifyColumnType(c.getDataType())));
dbColumnMap.put(modelBuildReq.getSql(), columns);
} else {
for (String table : modelBuildReq.getTables()) {

View File

@@ -6,12 +6,9 @@ import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DataEvent;
import com.tencent.supersonic.common.pojo.DataItem;
import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.*;
import com.tencent.supersonic.common.pojo.enums.EventType;
import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
@@ -25,23 +22,14 @@ import com.tencent.supersonic.headless.api.pojo.request.DimValueAliasReq;
import com.tencent.supersonic.headless.api.pojo.request.DimensionReq;
import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq;
import com.tencent.supersonic.headless.api.pojo.request.PageDimensionReq;
import com.tencent.supersonic.headless.api.pojo.response.DataSetResp;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.server.persistence.dataobject.DimensionDO;
import com.tencent.supersonic.headless.server.persistence.mapper.DimensionDOMapper;
import com.tencent.supersonic.headless.server.persistence.repository.DimensionRepository;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter;
import com.tencent.supersonic.headless.server.pojo.ModelFilter;
import com.tencent.supersonic.headless.server.service.DataSetService;
import com.tencent.supersonic.headless.server.service.DatabaseService;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.ModelRelaService;
import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.service.TagMetaService;
import com.tencent.supersonic.headless.server.service.*;
import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper;
import com.tencent.supersonic.headless.server.utils.DimensionConverter;
import com.tencent.supersonic.headless.server.utils.NameCheckUtils;
@@ -152,11 +140,7 @@ public class DimensionServiceImpl extends ServiceImpl<DimensionDOMapper, Dimensi
DimensionConverter.convert(dimensionDO, dimensionReq);
dimensionRepository.updateDimension(dimensionDO);
if (!oldName.equals(dimensionDO.getName())) {
sendEvent(
DataItem.builder().modelId(dimensionDO.getModelId() + Constants.UNDERLINE)
.newName(dimensionReq.getName()).name(oldName).type(TypeEnums.DIMENSION)
.id(dimensionDO.getId() + Constants.UNDERLINE).build(),
EventType.UPDATE);
sendEvent(getDataItem(dimensionDO), EventType.UPDATE);
}
}
@@ -424,7 +408,7 @@ public class DimensionServiceImpl extends ServiceImpl<DimensionDOMapper, Dimensi
eventPublisher.publishEvent(dataEvent);
}
public DataEvent getDataEvent() {
public DataEvent getAllDataEvents() {
DimensionFilter dimensionFilter = new DimensionFilter();
List<DimensionDO> dimensionDOS = queryDimension(dimensionFilter);
return getDataEvent(dimensionDOS, EventType.ADD);
@@ -464,13 +448,18 @@ public class DimensionServiceImpl extends ServiceImpl<DimensionDOMapper, Dimensi
return true;
}
/**
 * Builds a DataItem snapshot for the given dimension record.
 *
 * <p>Resolves the parent model first so the converted DimensionResp carries the
 * model context, then copies id, names, modelId, and domainId into the DataItem.
 *
 * @param dimensionDO persisted dimension record
 * @return DataItem describing the dimension, typed as DIMENSION
 */
private DataItem getDataItem(DimensionDO dimensionDO) {
    ModelResp model = modelService.getModel(dimensionDO.getModelId());
    DimensionResp dimension = DimensionConverter.convert2DimensionResp(dimensionDO,
            ImmutableMap.of(model.getId(), model));
    return DataItem.builder()
            .id(dimension.getId().toString())
            .name(dimension.getName())
            .bizName(dimension.getBizName())
            .modelId(dimension.getModelId().toString())
            .domainId(dimension.getDomainId().toString())
            .type(TypeEnums.DIMENSION)
            .build();
}
private DataEvent getDataEvent(List<DimensionDO> dimensionDOS, EventType eventType) {
List<DataItem> dataItems = dimensionDOS.stream()
.map(dimensionDO -> DataItem.builder().id(dimensionDO.getId() + Constants.UNDERLINE)
.name(dimensionDO.getName())
.modelId(dimensionDO.getModelId() + Constants.UNDERLINE)
.type(TypeEnums.DIMENSION).build())
.collect(Collectors.toList());
List<DataItem> dataItems =
dimensionDOS.stream().map(this::getDataItem).collect(Collectors.toList());
return new DataEvent(this, dataItems, eventType);
}

View File

@@ -5,59 +5,26 @@ import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DataEvent;
import com.tencent.supersonic.common.pojo.DataItem;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.*;
import com.tencent.supersonic.common.pojo.enums.*;
import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MeasureParam;
import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.MetricParam;
import com.tencent.supersonic.headless.api.pojo.MetricQueryDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.enums.MapModeEnum;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq;
import com.tencent.supersonic.headless.api.pojo.request.MetricBaseReq;
import com.tencent.supersonic.headless.api.pojo.request.MetricReq;
import com.tencent.supersonic.headless.api.pojo.request.PageMetricReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryMapReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryMetricReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DataSetMapInfo;
import com.tencent.supersonic.headless.api.pojo.response.DataSetResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.MapInfoResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.request.*;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.server.facade.service.ChatLayerService;
import com.tencent.supersonic.headless.server.persistence.dataobject.CollectDO;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricQueryDefaultConfigDO;
import com.tencent.supersonic.headless.server.persistence.mapper.MetricDOMapper;
import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter;
import com.tencent.supersonic.headless.server.pojo.MetricFilter;
import com.tencent.supersonic.headless.server.pojo.MetricsFilter;
import com.tencent.supersonic.headless.server.pojo.ModelCluster;
import com.tencent.supersonic.headless.server.pojo.ModelFilter;
import com.tencent.supersonic.headless.server.service.CollectService;
import com.tencent.supersonic.headless.server.service.DataSetService;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.service.TagMetaService;
import com.tencent.supersonic.headless.server.pojo.*;
import com.tencent.supersonic.headless.server.service.*;
import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper;
import com.tencent.supersonic.headless.server.utils.MetricCheckUtils;
import com.tencent.supersonic.headless.server.utils.MetricConverter;
@@ -70,18 +37,7 @@ import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;
@Service
@@ -667,12 +623,13 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
}
private DataItem getDataItem(MetricDO metricDO) {
MetricResp metricResp =
MetricConverter.convert2MetricResp(metricDO, new HashMap<>(), Lists.newArrayList());
ModelResp modelResp = modelService.getModel(metricDO.getModelId());
MetricResp metricResp = MetricConverter.convert2MetricResp(metricDO,
ImmutableMap.of(modelResp.getId(), modelResp), Lists.newArrayList());
fillDefaultAgg(metricResp);
return DataItem.builder().id(metricDO.getId() + Constants.UNDERLINE)
.name(metricDO.getName()).bizName(metricDO.getBizName())
.modelId(metricDO.getModelId() + Constants.UNDERLINE).type(TypeEnums.METRIC)
return DataItem.builder().id(metricResp.getId().toString()).name(metricResp.getName())
.bizName(metricResp.getBizName()).modelId(metricResp.getModelId().toString())
.domainId(metricResp.getDomainId().toString()).type(TypeEnums.METRIC)
.defaultAgg(metricResp.getDefaultAgg()).build();
}

View File

@@ -12,7 +12,7 @@ import com.tencent.supersonic.common.pojo.exception.InvalidArgumentException;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.DBColumn;
import com.tencent.supersonic.headless.api.pojo.DbSchema;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.ItemDateFilter;
import com.tencent.supersonic.headless.api.pojo.Measure;
@@ -78,23 +78,23 @@ import java.util.stream.Collectors;
@Slf4j
public class ModelServiceImpl implements ModelService {
private ModelRepository modelRepository;
private final ModelRepository modelRepository;
private DatabaseService databaseService;
private final DatabaseService databaseService;
private DimensionService dimensionService;
private final DimensionService dimensionService;
private MetricService metricService;
private final MetricService metricService;
private DomainService domainService;
private final DomainService domainService;
private UserService userService;
private final UserService userService;
private DataSetService dataSetService;
private final DataSetService dataSetService;
private DateInfoRepository dateInfoRepository;
private final DateInfoRepository dateInfoRepository;
private ModelRelaService modelRelaService;
private final ModelRelaService modelRelaService;
ExecutorService executor =
new ThreadPoolExecutor(0, 5, 5L, TimeUnit.SECONDS, new LinkedBlockingQueue<>());
@@ -292,12 +292,9 @@ public class ModelServiceImpl implements ModelService {
if (modelReq.getModelDetail() == null) {
return;
}
List<Dim> dims = modelReq.getModelDetail().getDimensions();
List<Dimension> dims = modelReq.getModelDetail().getDimensions();
List<Measure> measures = modelReq.getModelDetail().getMeasures();
List<Identify> identifies = modelReq.getModelDetail().getIdentifiers();
if (CollectionUtils.isEmpty(dims)) {
throw new InvalidArgumentException("缺少维度信息");
}
for (Measure measure : measures) {
String measureForbiddenCharacters =
NameCheckUtils.findForbiddenCharacters(measure.getName());
@@ -308,7 +305,7 @@ public class ModelServiceImpl implements ModelService {
throw new InvalidArgumentException(message);
}
}
for (Dim dim : dims) {
for (Dimension dim : dims) {
String dimForbiddenCharacters = NameCheckUtils.findForbiddenCharacters(dim.getName());
if (StringUtils.isNotBlank(dim.getName())
&& StringUtils.isNotBlank(dimForbiddenCharacters)) {
@@ -337,12 +334,10 @@ public class ModelServiceImpl implements ModelService {
Set<String> relations = new HashSet<>();
for (ModelRela modelRela : modelRelas) {
if (modelRela.getFromModelId().equals(modelReq.getId())) {
modelRela.getJoinConditions().stream()
.forEach(r -> relations.add(r.getLeftField()));
modelRela.getJoinConditions().forEach(r -> relations.add(r.getLeftField()));
}
if (modelRela.getToModelId().equals(modelReq.getId())) {
modelRela.getJoinConditions().stream()
.forEach(r -> relations.add(r.getRightField()));
modelRela.getJoinConditions().forEach(r -> relations.add(r.getRightField()));
}
}
if (relations.isEmpty()) {
@@ -351,10 +346,10 @@ public class ModelServiceImpl implements ModelService {
// any identify in model relation should not be deleted
if (modelReq.getModelDetail() == null
|| CollectionUtils.isEmpty(modelReq.getModelDetail().getIdentifiers())) {
throw new InvalidArgumentException(String.format("模型关联中主键/外键不存在, 请检查"));
throw new InvalidArgumentException("模型关联中主键/外键不存在, 请检查");
}
List<String> modelIdentifiers = modelReq.getModelDetail().getIdentifiers().stream()
.map(i -> i.getBizName()).collect(Collectors.toList());
.map(Identify::getBizName).collect(Collectors.toList());
for (String rela : relations) {
if (!modelIdentifiers.contains(rela)) {
throw new InvalidArgumentException(String.format("模型关联中主键/外键(%s)不存在, 请检查", rela));
@@ -459,7 +454,7 @@ public class ModelServiceImpl implements ModelService {
}
ModelFilter modelFilter = new ModelFilter();
modelFilter.setDomainIds(domainIds);
modelFilter.setIncludesDetail(false);
modelFilter.setIncludesDetail(true);
List<ModelResp> modelResps = getModelList(modelFilter);
if (CollectionUtils.isEmpty(modelResps)) {
return modelResps;

View File

@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.server.service.impl;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.ValueDistribution;
import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType;
@@ -93,7 +93,7 @@ public class TagQueryServiceImpl implements TagQueryService {
private void correctDateConf(ItemValueReq itemValueReq, TagResp tag, User user)
throws Exception {
ModelResp model = modelService.getModel(tag.getModelId());
List<Dim> timeDimension = model.getTimeDimension();
List<Dimension> timeDimension = model.getTimeDimension();
if (CollectionUtils.isEmpty(timeDimension)) {
itemValueReq.setDateConf(null);
return;
@@ -112,12 +112,12 @@ public class TagQueryServiceImpl implements TagQueryService {
itemValueReq.setDateConf(dateConf);
}
private String queryTagDate(Dim dim) {
private String queryTagDate(Dimension dim) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dim.getDateFormat());
return LocalDate.now().plusDays(-dayBefore).format(formatter);
}
private String queryTagDateFromDbBySql(Dim dim, TagResp tag, ItemValueReq itemValueReq,
private String queryTagDateFromDbBySql(Dimension dim, TagResp tag, ItemValueReq itemValueReq,
User user) {
String sqlPattern = "select max(%s) as %s from tbl where %s is not null";
@@ -129,7 +129,7 @@ public class TagQueryServiceImpl implements TagQueryService {
if (Objects.nonNull(itemValueReq) && itemValueReq.getDateConf().getUnit() > 1) {
ModelResp model = modelService.getModel(tag.getModelId());
if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension();
List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) {
String dateFormat = timeDims.get(0).getDateFormat();
if (StringUtils.isEmpty(dateFormat)) {

View File

@@ -69,7 +69,7 @@ public class MetaEmbeddingTask implements CommandLineRunner {
embeddingService.addQuery(embeddingConfig.getMetaCollectionName(),
TextSegmentConvert.convertToEmbedding(metricDataItems));
List<DataItem> dimensionDataItems = dimensionService.getDataEvent().getDataItems();
List<DataItem> dimensionDataItems = dimensionService.getAllDataEvents().getDataItems();
embeddingService.addQuery(embeddingConfig.getMetaCollectionName(),
TextSegmentConvert.convertToEmbedding(dimensionDataItems));
} catch (Exception e) {

View File

@@ -14,7 +14,7 @@ import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.ItemValueConfig;
import com.tencent.supersonic.headless.api.pojo.request.DictItemReq;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
@@ -401,7 +401,7 @@ public class DictUtils {
private void fillStructDateBetween(QueryStructReq queryStructReq, ModelResp model,
Integer itemValueDateStart, Integer itemValueDateEnd) {
if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension();
List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) {
DateConf dateConf = new DateConf();
dateConf.setDateMode(DateConf.DateMode.BETWEEN);
@@ -496,7 +496,7 @@ public class DictUtils {
private boolean partitionedModel(Long modelId) {
ModelResp model = modelService.getModel(modelId);
if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension();
List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) {
return true;
}
@@ -507,7 +507,7 @@ public class DictUtils {
private String generateDictDateFilterRecent(DictItemResp dictItemResp) {
ModelResp model = modelService.getModel(dictItemResp.getModelId());
if (Objects.nonNull(model)) {
List<Dim> timeDims = model.getTimeDimension();
List<Dimension> timeDims = model.getTimeDimension();
if (!CollectionUtils.isEmpty(timeDims)) {
String dateFormat = timeDims.get(0).getDateFormat();
if (StringUtils.isEmpty(dateFormat)) {

View File

@@ -105,6 +105,8 @@ public class DimensionConverter {
dimensionResp.setType(getType(dimensionDO.getType()));
dimensionResp.setTypeEnum(TypeEnums.DIMENSION);
dimensionResp.setIsTag(dimensionDO.getIsTag());
dimensionResp.setDomainId(modelRespMap
.getOrDefault(dimensionResp.getModelId(), new ModelResp()).getDomainId());
return dimensionResp;
}

View File

@@ -7,7 +7,7 @@ import com.tencent.supersonic.common.pojo.enums.PublishEnum;
import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByFieldParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMetricParams;
@@ -82,7 +82,7 @@ public class MetricConverter {
metricResp.setModelName(modelResp.getName());
metricResp.setModelBizName(modelResp.getBizName());
metricResp.setDomainId(modelResp.getDomainId());
List<Dim> timeDims = modelResp.getTimeDimension();
List<Dimension> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
metricResp.setContainsPartitionDimensions(true);
}

View File

@@ -8,7 +8,7 @@ import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.ColumnSchema;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
@@ -111,7 +111,7 @@ public class ModelConverter {
return measureResp;
}
public static DimensionReq convert(Dim dim, ModelDO modelDO) {
public static DimensionReq convert(Dimension dim, ModelDO modelDO) {
DimensionReq dimensionReq = new DimensionReq();
dimensionReq.setName(dim.getName());
dimensionReq.setBizName(dim.getBizName());
@@ -129,7 +129,6 @@ public class ModelConverter {
dimensionReq.setType(dim.getType().name());
dimensionReq
.setDescription(Objects.isNull(dim.getDescription()) ? "" : dim.getDescription());
dimensionReq.setIsTag(dim.getIsTag());
dimensionReq.setTypeParams(dim.getTypeParams());
return dimensionReq;
}
@@ -165,8 +164,8 @@ public class ModelConverter {
public static ModelReq convert(ModelSchema modelSchema, ModelBuildReq modelBuildReq,
String tableName) {
ModelReq modelReq = new ModelReq();
modelReq.setName(modelSchema.getName());
modelReq.setBizName(modelSchema.getBizName());
modelReq.setName(modelBuildReq.getName());
modelReq.setBizName(modelBuildReq.getBizName());
modelReq.setDatabaseId(modelBuildReq.getDatabaseId());
modelReq.setDomainId(modelBuildReq.getDomainId());
ModelDetail modelDetail = new ModelDetail();
@@ -188,7 +187,7 @@ public class ModelConverter {
columnSchema.getAgg().getOperator(), 1);
modelDetail.getMeasures().add(measure);
} else {
Dim dim = new Dim(columnSchema.getName(), columnSchema.getColumnName(),
Dimension dim = new Dimension(columnSchema.getName(), columnSchema.getColumnName(),
DimensionType.valueOf(columnSchema.getFiledType().name()), 1);
modelDetail.getDimensions().add(dim);
}
@@ -198,10 +197,12 @@ public class ModelConverter {
}
private static IdentifyType getIdentifyType(FieldType fieldType) {
if (FieldType.foreign_key.equals(fieldType) || FieldType.primary_key.equals(fieldType)) {
if (FieldType.primary_key.equals(fieldType)) {
return IdentifyType.primary;
} else {
} else if (FieldType.foreign_key.equals(fieldType)) {
return IdentifyType.foreign;
} else {
return null;
}
}
@@ -214,7 +215,7 @@ public class ModelConverter {
return modelDescs;
}
private static boolean isCreateDimension(Dim dim) {
private static boolean isCreateDimension(Dimension dim) {
return dim.getIsCreateDimension() == 1 && StringUtils.isNotBlank(dim.getName());
}
@@ -226,7 +227,7 @@ public class ModelConverter {
return measure.getIsCreateMetric() == 1 && StringUtils.isNotBlank(measure.getName());
}
public static List<Dim> getDimToCreateDimension(ModelDetail modelDetail) {
public static List<Dimension> getDimToCreateDimension(ModelDetail modelDetail) {
if (CollectionUtils.isEmpty(modelDetail.getDimensions())) {
return Lists.newArrayList();
}
@@ -254,7 +255,7 @@ public class ModelConverter {
List<DimensionReq> dimensionReqs = Lists.newArrayList();
ModelDetail modelDetail =
JSONObject.parseObject(modelDO.getModelDetail(), ModelDetail.class);
List<Dim> dims = getDimToCreateDimension(modelDetail);
List<Dimension> dims = getDimToCreateDimension(modelDetail);
if (!CollectionUtils.isEmpty(dims)) {
dimensionReqs = dims.stream().filter(dim -> StringUtils.isNotBlank(dim.getName()))
.map(dim -> convert(dim, modelDO)).collect(Collectors.toList());

View File

@@ -40,28 +40,4 @@
<result column="query_opt_mode" property="queryOptMode"/>
</resultMap>
<select id="getStatInfo"
resultType="com.tencent.supersonic.headless.api.pojo.QueryStat">
select *
from s2_query_stat_info
<where>
<if test="startTime != null">
and start_time >= #{startTime}String.valueOf(queryFilter.getValue())
</if>
<if test="modelId != null">
and model_id = #{modelId}
</if>
<if test="modelIds != null and modelIds.size() > 0">
and model_id in
<foreach item="id" collection="modelIds" open="(" separator="," close=")">
#{id}
</foreach>
</if>
<if test="metric != null">
and metrics like concat('%',#{metric},'%')
</if>
</where>
</select>
</mapper>

View File

@@ -116,63 +116,6 @@
</foreach>
</update>
<select id="query" resultMap="ResultMapWithBLOBs">
select t.*, (case when t1.id is not null then 1 else 0 end) as isTag
from s2_dimension t
left join (
select *
from s2_tag
where type = 'DIMENSION'
) t1 on t.id = t1.item_id
where status != 3
<if test="key != null and key != ''">
and ( t.id like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.biz_name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.alias like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.description like CONCAT('%',#{key , jdbcType=VARCHAR},'%') )
</if>
<if test="id != null">
and t.id like CONCAT('%',#{id , jdbcType=VARCHAR},'%')
</if>
<if test="name != null and name != '' ">
and t.name like CONCAT('%',#{name , jdbcType=VARCHAR},'%')
</if>
<if test="bizName != null and bizName != ''">
and t.biz_name like CONCAT('%',#{bizName , jdbcType=VARCHAR},'%')
</if>
<if test="sensitiveLevel != null">
and t.sensitive_level = #{sensitiveLevel}
</if>
<if test="status != null">
and t.status = #{status}
</if>
<if test="modelIds != null and modelIds.size >0">
and t.model_id in
<foreach collection="modelIds" index="index" item="model" open="(" close=")"
separator=",">
#{model}
</foreach>
</if>
<if test="ids != null and ids.size >0">
and t.id in
<foreach collection="ids" index="index" item="id" open="(" close=")"
separator=",">
#{id}
</foreach>
</if>
<if test="createdBy != null">
and t.created_by = #{createdBy}
</if>
<if test="isTag != null and isTag == 1">
and t1.id is not null
</if>
<if test="isTag != null and isTag == 0">
and t1.id is null
</if>
</select>
<select id="queryDimensions" resultMap="ResultMapWithBLOBs">
select *
from s2_dimension

View File

@@ -127,70 +127,6 @@
</foreach>
</update>
<select id="query" resultMap="ResultMapWithBLOBs">
select t.*, (case when t1.id is not null then 1 else 0 end) as isTag
from s2_metric t
left join (
select *
from s2_tag
where type = 'METRIC'
) t1 on t.id = t1.item_id
where t.status != 3
<if test="type != null and type != ''">
and t.type = #{type}
</if>
<if test="key != null and key != ''">
and ( t.id like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.biz_name like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.description like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.alias like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.classifications like CONCAT('%',#{key , jdbcType=VARCHAR},'%') or
t.created_by like CONCAT('%',#{key , jdbcType=VARCHAR},'%') )
</if>
<if test="id != null">
and t.id like CONCAT('%',#{id , jdbcType=VARCHAR},'%')
</if>
<if test="name != null and name != '' ">
and t.name like CONCAT('%',#{name , jdbcType=VARCHAR},'%')
</if>
<if test="bizName != null and bizName != ''">
and t.biz_name like CONCAT('%',#{bizName , jdbcType=VARCHAR},'%')
</if>
<if test="sensitiveLevel != null">
and t.sensitive_level = #{sensitiveLevel}
</if>
<if test="status != null">
and t.status = #{status}
</if>
<if test="modelIds != null and modelIds.size >0">
and t.model_id in
<foreach collection="modelIds" index="index" item="model" open="(" close=")"
separator=",">
#{model}
</foreach>
</if>
<if test="ids != null and ids.size >0">
and t.id in
<foreach collection="ids" index="index" item="id" open="(" close=")"
separator=",">
#{id}
</foreach>
</if>
<if test="createdBy != null">
and t.created_by = #{createdBy}
</if>
<if test="isTag != null and isTag == 1">
and t1.id is not null
</if>
<if test="isTag != null and isTag == 0">
and t1.id is null
</if>
<if test="isPublish != null and isPublish == 1">
and (t.created_by = #{userName} or t.is_publish = 1)
</if>
</select>
<select id="queryMetrics" resultMap="ResultMapWithBLOBs">
select *
from s2_metric

View File

@@ -5,7 +5,7 @@ import com.tencent.supersonic.auth.api.authentication.service.UserService;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
@@ -95,11 +95,11 @@ class ModelServiceImplTest {
List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name"));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0);
List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0);
Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page");
dimensions.add(dimension2);
modelDetail.setDimensions(dimensions);
@@ -134,11 +134,11 @@ class ModelServiceImplTest {
identifiers.add(new Identify("用户名_a", IdentifyType.primary.name(), "user_name_a"));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date_a", DimensionType.partition_time, 0);
List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date_a", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page_a", DimensionType.categorical, 0);
Dimension dimension2 = new Dimension("", "page_a", DimensionType.categorical, 0);
dimension2.setExpr("page_a");
dimensions.add(dimension2);
modelDetail.setDimensions(dimensions);
@@ -169,11 +169,11 @@ class ModelServiceImplTest {
List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name"));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0);
List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0);
Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page");
dimensions.add(dimension2);
modelDetail.setDimensions(dimensions);
@@ -207,11 +207,11 @@ class ModelServiceImplTest {
identifiers.add(new Identify("用户名", IdentifyType.primary.name(), "user_name"));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0);
List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0);
Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page");
dimensions.add(dimension2);
modelDetail.setDimensions(dimensions);
@@ -252,11 +252,11 @@ class ModelServiceImplTest {
identifiers.add(new Identify("用户名_a", IdentifyType.primary.name(), "user_name_a"));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date_a", DimensionType.partition_time, 0);
List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date_a", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page_a", DimensionType.categorical, 0);
Dimension dimension2 = new Dimension("", "page_a", DimensionType.categorical, 0);
dimension2.setExpr("page_a");
dimensions.add(dimension2);
modelDetail.setDimensions(dimensions);

View File

@@ -4,6 +4,7 @@ com.tencent.supersonic.headless.chat.mapper.SchemaMapper=\
com.tencent.supersonic.headless.chat.mapper.EmbeddingMapper, \
com.tencent.supersonic.headless.chat.mapper.KeywordMapper, \
com.tencent.supersonic.headless.chat.mapper.QueryFilterMapper, \
com.tencent.supersonic.headless.chat.mapper.TimeFieldMapper,\
com.tencent.supersonic.headless.chat.mapper.TermDescMapper
com.tencent.supersonic.headless.chat.parser.SemanticParser=\
@@ -27,9 +28,9 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\

View File

@@ -89,14 +89,14 @@ public class S2CompanyDemo extends S2BaseDemo {
modelReq.setAdmins(Collections.singletonList("admin"));
modelReq.setAdminOrgs(Collections.emptyList());
ModelDetail modelDetail = new ModelDetail();
List<Dim> dimensions = new ArrayList<>();
List<Dimension> dimensions = new ArrayList<>();
modelDetail.setDimensions(dimensions);
dimensions.add(new Dim("公司名称", "company_name", DimensionType.categorical, 1));
dimensions.add(new Dim("总部地点", "headquarter_address", DimensionType.categorical, 1));
dimensions.add(new Dim("成立时间", "company_established_time", DimensionType.time, 1));
dimensions.add(new Dim("创始人", "founder", DimensionType.categorical, 1));
dimensions.add(new Dim("首席执行官", "ceo", DimensionType.categorical, 1));
dimensions.add(new Dimension("公司名称", "company_name", DimensionType.categorical, 1));
dimensions.add(new Dimension("总部地点", "headquarter_address", DimensionType.categorical, 1));
dimensions.add(new Dimension("成立时间", "company_established_time", DimensionType.time, 1));
dimensions.add(new Dimension("创始人", "founder", DimensionType.categorical, 1));
dimensions.add(new Dimension("首席执行官", "ceo", DimensionType.categorical, 1));
List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("公司id", IdentifyType.primary.name(), "company_id"));
@@ -131,12 +131,13 @@ public class S2CompanyDemo extends S2BaseDemo {
modelReq.setAdmins(Collections.singletonList("admin"));
modelReq.setAdminOrgs(Collections.emptyList());
ModelDetail modelDetail = new ModelDetail();
List<Dim> dimensions = new ArrayList<>();
List<Dimension> dimensions = new ArrayList<>();
modelDetail.setDimensions(dimensions);
dimensions.add(new Dim("品牌名称", "brand_name", DimensionType.categorical, 1));
dimensions.add(new Dim("品牌成立时间", "brand_established_time", DimensionType.time, 1));
dimensions.add(new Dim("法定代表人", "legal_representative", DimensionType.categorical, 1));
dimensions.add(new Dimension("品牌名称", "brand_name", DimensionType.categorical, 1));
dimensions.add(new Dimension("品牌成立时间", "brand_established_time", DimensionType.time, 1));
dimensions
.add(new Dimension("法定代表人", "legal_representative", DimensionType.categorical, 1));
List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("品牌id", IdentifyType.primary.name(), "brand_id"));
@@ -168,10 +169,10 @@ public class S2CompanyDemo extends S2BaseDemo {
modelReq.setAdmins(Collections.singletonList("admin"));
modelReq.setAdminOrgs(Collections.emptyList());
ModelDetail modelDetail = new ModelDetail();
List<Dim> dimensions = new ArrayList<>();
List<Dimension> dimensions = new ArrayList<>();
modelDetail.setDimensions(dimensions);
dimensions.add(new Dim("财年", "year_time", DimensionType.time, 1, "year_time", "yyyy",
dimensions.add(new Dimension("财年", "year_time", DimensionType.time, 1, "year_time", "yyyy",
new DimensionTimeTypeParams("false", "year")));
List<Identify> identifiers = new ArrayList<>();

View File

@@ -16,7 +16,7 @@ import com.tencent.supersonic.headless.api.pojo.AggregateTypeDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.DataSetDetail;
import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig;
import com.tencent.supersonic.headless.api.pojo.DetailTypeDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.ModelDetail;
@@ -101,10 +101,10 @@ public class S2SingerDemo extends S2BaseDemo {
identifiers.add(identify);
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
dimensions.add(new Dim("活跃区域", "act_area", DimensionType.categorical, 1));
dimensions.add(new Dim("代表作", "song_name", DimensionType.categorical, 1));
dimensions.add(new Dim("流派", "genre", DimensionType.categorical, 1));
List<Dimension> dimensions = new ArrayList<>();
dimensions.add(new Dimension("活跃区域", "act_area", DimensionType.categorical, 1));
dimensions.add(new Dimension("代表作", "song_name", DimensionType.categorical, 1));
dimensions.add(new Dimension("流派", "genre", DimensionType.categorical, 1));
modelDetail.setDimensions(dimensions);
Measure measure1 = new Measure("播放量", "js_play_cnt", "sum", 1);

View File

@@ -27,7 +27,7 @@ import com.tencent.supersonic.common.util.ChatAppManager;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.DataSetDetail;
import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.Field;
import com.tencent.supersonic.headless.api.pojo.FieldParam;
@@ -199,9 +199,9 @@ public class S2VisitsDemo extends S2BaseDemo {
identifiers.add(new Identify("用户", IdentifyType.primary.name(), "user_name", 1));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
dimensions.add(new Dim("部门", "department", DimensionType.categorical, 1));
// dimensions.add(new Dim("用户", "user_name", DimensionType.categorical, 1));
List<Dimension> dimensions = new ArrayList<>();
dimensions.add(new Dimension("部门", "department", DimensionType.categorical, 1));
// dimensions.add(new Dimension("用户", "user_name", DimensionType.categorical, 1));
modelDetail.setDimensions(dimensions);
List<Field> fields = Lists.newArrayList();
fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build());
@@ -230,11 +230,11 @@ public class S2VisitsDemo extends S2BaseDemo {
identifiers.add(new Identify("用户名", IdentifyType.foreign.name(), "user_name", 0));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("", "imp_date", DimensionType.partition_time, 0);
List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1);
Dim dimension2 = new Dim("", "page", DimensionType.categorical, 0);
Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page");
dimensions.add(dimension2);
modelDetail.setDimensions(dimensions);
@@ -274,11 +274,11 @@ public class S2VisitsDemo extends S2BaseDemo {
identifiers.add(new Identify("用户", IdentifyType.foreign.name(), "user_name", 0));
modelDetail.setIdentifiers(identifiers);
List<Dim> dimensions = new ArrayList<>();
Dim dimension1 = new Dim("数据日期", "imp_date", DimensionType.partition_time, 1);
List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 1);
dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1);
Dim dimension2 = new Dim("页面", "page", DimensionType.categorical, 1);
Dimension dimension2 = new Dimension("页面", "page", DimensionType.categorical, 1);
dimension2.setExpr("page");
dimensions.add(dimension2);
modelDetail.setDimensions(dimensions);

View File

@@ -4,6 +4,7 @@ com.tencent.supersonic.headless.chat.mapper.SchemaMapper=\
com.tencent.supersonic.headless.chat.mapper.EmbeddingMapper, \
com.tencent.supersonic.headless.chat.mapper.KeywordMapper, \
com.tencent.supersonic.headless.chat.mapper.QueryFilterMapper, \
com.tencent.supersonic.headless.chat.mapper.TimeFieldMapper,\
com.tencent.supersonic.headless.chat.mapper.TermDescMapper
com.tencent.supersonic.headless.chat.parser.SemanticParser=\
@@ -27,9 +28,9 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\

View File

@@ -0,0 +1,26 @@
spring:
datasource:
url: jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/${DB_NAME}?stringtype=unspecified
username: ${DB_USERNAME}
password: ${DB_PASSWORD}
driver-class-name: org.postgresql.Driver
sql:
init:
enabled: false
mode: always
username: ${DB_USERNAME}
password: ${DB_PASSWORD}
schema-locations: classpath:db/schema-postgres.sql,classpath:db/schema-postgres-demo.sql
data-locations: classpath:db/data-postgres.sql,classpath:db/data-postgres-demo.sql
s2:
embedding:
store:
provider: PGVECTOR
base:
url: ${DB_HOST}
port: ${DB_PORT:5432}
databaseName: ${DB_NAME}
user: ${DB_USERNAME}
password: ${DB_PASSWORD}
dimension: 512

View File

@@ -11,4 +11,16 @@ spring:
username: postgres
password: postgres
schema-locations: classpath:db/schema-postgres.sql,classpath:db/schema-postgres-demo.sql
data-locations: classpath:db/data-postgres.sql,classpath:db/data-postgres-demo.sql
data-locations: classpath:db/data-postgres.sql,classpath:db/data-postgres-demo.sql
#s2:
# embedding:
# store:
# provider: PGVECTOR
# base:
# url: 127.0.0.1
# port: 5432
# databaseName: postgres
# user: postgres
# password: postgres
# dimension: 512

View File

@@ -1,6 +0,0 @@
tom _1_2 1
alice _1_2 1
lucy _1_2 1
dean _1_2 1
john _1_2 1
jack _1_2 1

View File

@@ -1,4 +0,0 @@
p1 _3_4 3
p3 _3_4 4
p4 _3_4 4
p5 _3_4 2

View File

@@ -1,3 +0,0 @@
欧美 _4_5 1
港台 _4_5 3
内地 _4_5 2

View File

@@ -1,6 +0,0 @@
美人鱼 _4_6 1
青花瓷 _4_6 1
Love#Story _4_6 1
爱情转移 _4_6 1
人间烟火 _4_6 1
光的方向 _4_6 1

View File

@@ -1,6 +0,0 @@
张碧晨 _4_8 1
周杰伦 _4_8 1
Taylor#Swift _4_8 1
程响 _4_8 1
林俊杰 _4_8 1
陈奕迅 _4_8 1

View File

@@ -1,10 +1,10 @@
-- S2VisitsDemo
MERGE into s2_user_department (user_name, department) values ('jack','HR');
MERGE into s2_user_department (user_name, department) values ('tom','sales');
MERGE into s2_user_department (user_name, department) values ('lucy','marketing');
MERGE into s2_user_department (user_name, department) values ('john','strategy');
MERGE into s2_user_department (user_name, department) values ('alice','sales');
MERGE into s2_user_department (user_name, department) values ('dean','marketing');
INSERT INTO s2_user_department (user_name, department) values ('jack','HR');
INSERT INTO s2_user_department (user_name, department) values ('tom','sales');
INSERT INTO s2_user_department (user_name, department) values ('lucy','marketing');
INSERT INTO s2_user_department (user_name, department) values ('john','strategy');
INSERT INTO s2_user_department (user_name, department) values ('alice','sales');
INSERT INTO s2_user_department (user_name, department) values ('dean','marketing');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'jack', 'p1');
@@ -1020,61 +1020,61 @@ INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (
INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 8 DAY), 'lucy', '0.039935860913407284', 'p2');
-- S2ArtistDemo
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
MERGE into genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
MERGE into genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
-- S2CompanyDemo
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000);
INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000);
INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473);
INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503);
INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000);
INSERT INTO company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000);
INSERT INTO brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_133',12300000000, 2300000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_134',12400000000, 2400000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_135',12500000000, 2500000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_136',12600000000, 2600000000,40,40);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_137',12700000000, 2700000000,50,50);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_138',12800000000, 2800000000,20,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_139',12900000000, 2900000000,60,70);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_140',13000000000, 3000000000,80,100);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_131',13100000000,3100000000, 10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_132',13200000000, 3200000000,20,20);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_133',13300000000, 3300000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_134',13400000000, 3400000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_135',13500000000, 3500000000,30,30);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_136',13600000000, 3600000000,40,40);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_137',13700000000, 3700000000,50,50);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_138',13800000000, 3800000000,20,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_139',13900000000, 3900000000,60,70);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_140',14000000000, 4000000000,80,100);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_133',12300000000, 2300000000,30,30);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_134',12400000000, 2400000000,10,10);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_135',12500000000, 2500000000,30,30);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_136',12600000000, 2600000000,40,40);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_137',12700000000, 2700000000,50,50);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_138',12800000000, 2800000000,20,10);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_139',12900000000, 2900000000,60,70);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_140',13000000000, 3000000000,80,100);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_131',13100000000,3100000000, 10,10);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_132',13200000000, 3200000000,20,20);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_133',13300000000, 3300000000,30,30);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_134',13400000000, 3400000000,10,10);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_135',13500000000, 3500000000,30,30);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_136',13600000000, 3600000000,40,40);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_137',13700000000, 3700000000,50,50);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_138',13800000000, 3800000000,20,10);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_139',13900000000, 3900000000,60,70);
INSERT INTO brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2024','item_brand_13_140',14000000000, 4000000000,80,100);

View File

@@ -1,10 +1,10 @@
-- S2VisitsDemo
MERGE into s2_user_department (user_name, department) values ('jack','HR');
MERGE into s2_user_department (user_name, department) values ('tom','sales');
MERGE into s2_user_department (user_name, department) values ('lucy','marketing');
MERGE into s2_user_department (user_name, department) values ('john','strategy');
MERGE into s2_user_department (user_name, department) values ('alice','sales');
MERGE into s2_user_department (user_name, department) values ('dean','marketing');
INSERT into s2_user_department (user_name, department) values ('jack','HR');
INSERT into s2_user_department (user_name, department) values ('tom','sales');
INSERT into s2_user_department (user_name, department) values ('lucy','marketing');
INSERT into s2_user_department (user_name, department) values ('john','strategy');
INSERT into s2_user_department (user_name, department) values ('alice','sales');
INSERT into s2_user_department (user_name, department) values ('dean','marketing');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE, 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE, 'jack', 'p1');
@@ -1016,43 +1016,43 @@ INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (
INSERT INTO s2_stay_time_statis (imp_date, user_name, stay_hours, page) VALUES (CURRENT_DATE - INTERVAL '8 DAY', 'lucy', '0.039935860913407284', 'p2');
-- S2ArtistDemo
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('tagore',8,'孟加拉国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('nazrul',7,'孟加拉国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('民间',9,'锡尔赫特、吉大港、库斯蒂亚');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
INSERT INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
-- S2CompanyDemo
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000);
MERGE into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000);
INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_131','微软','西雅图','1975','盖茨','纳德拉',102300000000,210000);
INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_132','特斯拉','加州','2003','艾伯哈德','马斯克',376800000000,140473);
INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_133','谷歌','加州','1998','拉里佩奇','劈柴',321600000000,182503);
INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_134','亚马逊','加州','1994','贝索斯','贝索斯',28800000000,950000);
INSERT into company(company_id,company_name,headquarter_address,company_established_time,founder,ceo,annual_turnover,employee_count) VALUES ('item_enterprise_13_135','英伟达','杭州','1993','黄仁勋','黄仁勋',67500000000,29000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000);
MERGE into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_131','Office','1990','item_enterprise_13_131','盖茨',50000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_132','Windows','1991','item_enterprise_13_131','盖茨',50000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_133','Model 3','2017','item_enterprise_13_132','马斯克',100000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_134','Model Y','2020','item_enterprise_13_132','马斯克',100000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_135','Google','2003','item_enterprise_13_133','拉里佩奇',50000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_136','Android','2007','item_enterprise_13_133','拉里佩奇',50000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_137','aws','2004','item_enterprise_13_134','贝索斯',100000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_138','kindle','2007','item_enterprise_13_134','贝索斯',100000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_139','H100','2022','item_enterprise_13_135','黄仁勋',100000000);
INSERT into brand(brand_id,brand_name,brand_established_time,company_id,legal_representative,registered_capital) VALUES ('item_brand_13_140','A100','2021','item_enterprise_13_135','黄仁勋',100000000);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_131',12100000000, 2100000000,10,10);
insert into brand_revenue(year_time,brand_id,revenue,profit,revenue_growth_year_on_year,profit_growth_year_on_year) VALUES ('2023','item_brand_13_132',12200000000, 2200000000,20,20);

View File

@@ -73,10 +73,10 @@ CREATE TABLE IF NOT EXISTS s2_chat_memory (
agent_id INTEGER,
db_schema TEXT,
s2_sql TEXT,
status char(10),
llm_review char(10),
status varchar(20),
llm_review varchar(20),
llm_comment TEXT,
human_review char(10),
human_review varchar(20),
human_comment TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

View File

@@ -42,7 +42,7 @@ public class SemanticModellerTest extends BaseTest {
Assertions.assertEquals(2, userModelSchema.getColumnSchemas().size());
Assertions.assertEquals(FieldType.primary_key,
userModelSchema.getColumnByName("user_name").getFiledType());
Assertions.assertEquals(FieldType.dimension,
Assertions.assertEquals(FieldType.categorical,
userModelSchema.getColumnByName("department").getFiledType());
ModelSchema stayTimeModelSchema = modelSchemaMap.get("s2_stay_time_statis");
@@ -51,7 +51,7 @@ public class SemanticModellerTest extends BaseTest {
stayTimeModelSchema.getColumnByName("user_name").getFiledType());
Assertions.assertEquals(FieldType.partition_time,
stayTimeModelSchema.getColumnByName("imp_date").getFiledType());
Assertions.assertEquals(FieldType.dimension,
Assertions.assertEquals(FieldType.categorical,
stayTimeModelSchema.getColumnByName("page").getFiledType());
Assertions.assertEquals(FieldType.measure,
stayTimeModelSchema.getColumnByName("stay_hours").getFiledType());
@@ -75,9 +75,9 @@ public class SemanticModellerTest extends BaseTest {
Assertions.assertEquals(5, pvModelSchema.getColumnSchemas().size());
Assertions.assertEquals(FieldType.partition_time,
pvModelSchema.getColumnByName("imp_date").getFiledType());
Assertions.assertEquals(FieldType.dimension,
Assertions.assertEquals(FieldType.categorical,
pvModelSchema.getColumnByName("user_name").getFiledType());
Assertions.assertEquals(FieldType.dimension,
Assertions.assertEquals(FieldType.categorical,
pvModelSchema.getColumnByName("page").getFiledType());
Assertions.assertEquals(FieldType.measure,
pvModelSchema.getColumnByName("pv").getFiledType());

View File

@@ -1,2 +0,0 @@
[InternetShortcut]
URL=https://github.com/hankcs/HanLP/

View File

@@ -1,3 +0,0 @@
龚 nr 1
龛 ng 1
龛影 n 1

View File

@@ -1,4 +0,0 @@
买@水果 1
然后@来 1
我@遗忘 10
遗忘@我 10

View File

@@ -1,8 +0,0 @@
阿里云 _10_20 5
天猫 _10_20 5
腾讯游戏 _10_20 5
度小满 _10_20 5
京东金融 _10_20 5

View File

@@ -1,8 +0,0 @@
张勇 _10_22 5
马化腾 _10_22 5
朱光 _10_22 5
刘强东 _10_22 5

View File

@@ -1,5 +0,0 @@
hr _1_1 876
sales _1_1 872
marketing _1_1 310
strategy _1_1 360
sales _1_1 500

Some files were not shown because too many files have changed in this diff Show More