(improvement)(chat) Rule-based queries, LLM-based queries, and dimension-value queries now support datasets without a time (partition) dimension. (#1522)

This commit is contained in:
lexluo09
2024-08-07 13:29:07 +08:00
committed by GitHub
parent c8fe6d2d04
commit 208686de46
28 changed files with 442 additions and 613 deletions

View File

@@ -44,7 +44,7 @@ public class ModelDetail {
            return Lists.newArrayList();
        }
        return dimensions.stream()
-                .filter(dim -> DimensionType.time.name().equalsIgnoreCase(dim.getType()))
+                .filter(dim -> DimensionType.partition_time.name().equalsIgnoreCase(dim.getType()))
                .collect(Collectors.toList());
    }
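Only dimensions typed partition_time now count as time dimensions; a model without such a column is treated as timeless and no date clause is generated for it. A minimal sketch of declaring a date column under the new type, assuming the four-argument Dim constructor seen in the demo change later in this commit (the display name is illustrative):

    // Hypothetical: keep date filtering for a model by declaring its date column
    // as a partition_time dimension; omit it entirely for a timeless model.
    Dim partitionDate = new Dim("数据日期", "imp_date", DimensionType.partition_time.name(), 0);
    dimensions.add(partitionDate);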

View File

@@ -168,99 +168,129 @@ public class QueryStructReq extends SemanticQueryReq {
        return result;
    }

-    private String buildSql(QueryStructReq queryStructReq, boolean isBizName) throws JSQLParserException {
+    private String buildSql(QueryStructReq queryStructReq, boolean isBizName)
+            throws JSQLParserException {
        ParenthesedSelect select = new ParenthesedSelect();
-        //1.Set the select items (columns)
        PlainSelect plainSelect = new PlainSelect();
+        // 1. Set the select items (columns)
+        plainSelect.setSelectItems(buildSelectItems(queryStructReq));
+        // 2. Set the table name
+        plainSelect.setFromItem(new Table(queryStructReq.getTableName()));
+        // 3. Set the order by clause
+        plainSelect.setOrderByElements(buildOrderByElements(queryStructReq));
+        // 4. Set the group by clause
+        plainSelect.setGroupByElement(buildGroupByElement(queryStructReq));
+        // 5. Set the limit clause
+        plainSelect.setLimit(buildLimit(queryStructReq));
+        select.setSelect(plainSelect);
+        // 6. Set where clause
+        return addWhereClauses(select.toString(), queryStructReq, isBizName);
+    }
+
+    private List<SelectItem<?>> buildSelectItems(QueryStructReq queryStructReq) {
        List<SelectItem<?>> selectItems = new ArrayList<>();
        List<String> groups = queryStructReq.getGroups();
        if (!CollectionUtils.isEmpty(groups)) {
            for (String group : groups) {
                selectItems.add(new SelectItem(new Column(group)));
            }
        }
        List<Aggregator> aggregators = queryStructReq.getAggregators();
        if (!CollectionUtils.isEmpty(aggregators)) {
            for (Aggregator aggregator : aggregators) {
-                String columnName = aggregator.getColumn();
-                if (queryStructReq.getQueryType().isNativeAggQuery()) {
-                    selectItems.add(new SelectItem(new Column(columnName)));
-                } else {
-                    Function sumFunction = new Function();
-                    AggOperatorEnum func = aggregator.getFunc();
-                    if (AggOperatorEnum.UNKNOWN.equals(func)) {
-                        func = AggOperatorEnum.SUM;
-                    }
-                    sumFunction.setName(func.getOperator());
-                    if (AggOperatorEnum.COUNT_DISTINCT.equals(func)) {
-                        sumFunction.setName("count");
-                        sumFunction.setDistinct(true);
-                    }
-                    sumFunction.setParameters(new ExpressionList(new Column(columnName)));
-                    SelectItem selectExpressionItem = new SelectItem(sumFunction);
-                    String alias = StringUtils.isNotBlank(aggregator.getAlias()) ? aggregator.getAlias() : columnName;
-                    selectExpressionItem.setAlias(new Alias(alias));
-                    selectItems.add(selectExpressionItem);
-                }
+                selectItems.add(buildAggregatorSelectItem(aggregator, queryStructReq));
            }
        }
-        plainSelect.setSelectItems(selectItems);
-        //2.Set the table name
-        Table table = new Table(queryStructReq.getTableName());
-        plainSelect.setFromItem(table);
-        //3.Set the order by clause
+        return selectItems;
+    }
+
+    private SelectItem buildAggregatorSelectItem(Aggregator aggregator, QueryStructReq queryStructReq) {
+        String columnName = aggregator.getColumn();
+        if (queryStructReq.getQueryType().isNativeAggQuery()) {
+            return new SelectItem(new Column(columnName));
+        } else {
+            Function function = new Function();
+            AggOperatorEnum func = aggregator.getFunc();
+            if (AggOperatorEnum.UNKNOWN.equals(func)) {
+                func = AggOperatorEnum.SUM;
+            }
+            function.setName(func.getOperator());
+            if (AggOperatorEnum.COUNT_DISTINCT.equals(func)) {
+                function.setName("count");
+                function.setDistinct(true);
+            }
+            function.setParameters(new ExpressionList(new Column(columnName)));
+            SelectItem selectExpressionItem = new SelectItem(function);
+            String alias = StringUtils.isNotBlank(aggregator.getAlias()) ? aggregator.getAlias() : columnName;
+            selectExpressionItem.setAlias(new Alias(alias));
+            return selectExpressionItem;
+        }
+    }
+
+    private List<OrderByElement> buildOrderByElements(QueryStructReq queryStructReq) {
        List<Order> orders = queryStructReq.getOrders();
+        List<OrderByElement> orderByElements = new ArrayList<>();
        if (!CollectionUtils.isEmpty(orders)) {
-            List<OrderByElement> orderByElements = new ArrayList<>();
            for (Order order : orders) {
                if (StringUtils.isBlank(order.getColumn())) {
                    continue;
                }
                OrderByElement orderByElement = new OrderByElement();
                orderByElement.setExpression(new Column(order.getColumn()));
-                orderByElement.setAsc(false);
-                if (Constants.ASC_UPPER.equalsIgnoreCase(order.getDirection())) {
-                    orderByElement.setAsc(true);
-                }
+                orderByElement.setAsc(Constants.ASC_UPPER.equalsIgnoreCase(order.getDirection()));
                orderByElements.add(orderByElement);
            }
-            plainSelect.setOrderByElements(orderByElements);
        }
-        //4.Set the group by clause
+        return orderByElements;
+    }
+
+    private GroupByElement buildGroupByElement(QueryStructReq queryStructReq) {
+        List<String> groups = queryStructReq.getGroups();
        if (!CollectionUtils.isEmpty(groups) && !queryStructReq.getQueryType().isNativeAggQuery()) {
            GroupByElement groupByElement = new GroupByElement();
            for (String group : groups) {
                groupByElement.addGroupByExpression(new Column(group));
            }
-            plainSelect.setGroupByElement(groupByElement);
+            return groupByElement;
        }
+        return null;
+    }
+
-        //5.Set the limit clause
-        if (Objects.nonNull(queryStructReq.getLimit())) {
-            Limit limit = new Limit();
-            limit.setRowCount(new LongValue(queryStructReq.getLimit()));
-            plainSelect.setLimit(limit);
-        }
-        //select.setSelectBody(plainSelect);
-        select.setSelect(plainSelect);
-        //6.Set where
-        List<Filter> dimensionFilters = queryStructReq.getDimensionFilters();
+    private Limit buildLimit(QueryStructReq queryStructReq) {
+        if (Objects.isNull(queryStructReq.getLimit())) {
+            return null;
+        }
+        Limit limit = new Limit();
+        limit.setRowCount(new LongValue(queryStructReq.getLimit()));
+        return limit;
+    }
+
+    private String addWhereClauses(String sql, QueryStructReq queryStructReq, boolean isBizName)
+            throws JSQLParserException {
        SqlFilterUtils sqlFilterUtils = ContextUtils.getBean(SqlFilterUtils.class);
-        String whereClause = sqlFilterUtils.getWhereClause(dimensionFilters, isBizName);
-        String sql = select.toString();
+        String whereClause = sqlFilterUtils.getWhereClause(queryStructReq.getDimensionFilters(), isBizName);
        if (StringUtils.isNotBlank(whereClause)) {
            Expression expression = CCJSqlParserUtil.parseCondExpression(whereClause);
            sql = SqlAddHelper.addWhere(sql, expression);
        }
-        //7.Set DateInfo
        DateModeUtils dateModeUtils = ContextUtils.getBean(DateModeUtils.class);
        String dateWhereStr = dateModeUtils.getDateWhereStr(queryStructReq.getDateInfo());
        if (StringUtils.isNotBlank(dateWhereStr)) {
            Expression expression = CCJSqlParserUtil.parseCondExpression(dateWhereStr);
            sql = SqlAddHelper.addWhere(sql, expression);
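All where handling now goes through addWhereClauses, which appends conditions to the already-rendered SELECT string. A minimal sketch of the underlying JSqlParser calls exactly as they are used above; the sample SQL and condition are illustrative, and for a data set without a partition dimension the generated date clause is presumably blank, so the SQL is returned unchanged:

    String sql = "SELECT singer_name FROM (SELECT singer_name FROM singer) t";
    Expression cond = CCJSqlParserUtil.parseCondExpression("imp_date >= '2024-08-01'");
    // dimension filters are added first, then the date clause, each ANDed into the WHERE
    sql = SqlAddHelper.addWhere(sql, cond);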

View File

@@ -7,6 +7,7 @@ import com.tencent.supersonic.common.pojo.ChatModelConfig;
import com.tencent.supersonic.common.pojo.Text2SQLExemplar;
import com.tencent.supersonic.common.pojo.enums.Text2SQLType;
import com.tencent.supersonic.common.util.ContextUtils;
+import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.QueryDataType;
import com.tencent.supersonic.headless.api.pojo.SchemaMapInfo;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
@@ -66,4 +67,10 @@ public class ChatQueryContext {
                .collect(Collectors.toList());
        return candidateQueries;
    }

+    public boolean containsPartitionDimensions(Long dataSetId) {
+        SemanticSchema semanticSchema = this.getSemanticSchema();
+        DataSetSchema dataSetSchema = semanticSchema.getDataSetSchemaMap().get(dataSetId);
+        return dataSetSchema.containsPartitionDimensions();
+    }
}
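containsPartitionDimensions is the gate used throughout this commit: date handling is only applied when the data set actually exposes a partition-time dimension. Usage as it appears in the LLMRequestService and BaseSemanticCorrector hunks below:

    if (chatQueryContext.containsPartitionDimensions(dataSetId)) {
        // only then offer the default date field and the day/week/month aliases
        fieldNameList.add(TimeDimensionEnum.DAY.getChName());
    }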

View File

@@ -7,6 +7,10 @@ import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.SemanticSchema;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;
+import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashSet;
@@ -15,10 +19,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.tuple.Pair;
-import org.springframework.util.CollectionUtils;

/**
 * basic semantic correction functionality, offering common methods and an
@@ -61,14 +61,16 @@ public abstract class BaseSemanticCorrector implements SemanticCorrector {
                    return elements.stream();
                })
                .collect(Collectors.toMap(a -> a, a -> a, (k1, k2) -> k1));
-        result.put(TimeDimensionEnum.DAY.getChName(), TimeDimensionEnum.DAY.getChName());
-        result.put(TimeDimensionEnum.MONTH.getChName(), TimeDimensionEnum.MONTH.getChName());
-        result.put(TimeDimensionEnum.WEEK.getChName(), TimeDimensionEnum.WEEK.getChName());
-        result.put(TimeDimensionEnum.DAY.getName(), TimeDimensionEnum.DAY.getChName());
-        result.put(TimeDimensionEnum.MONTH.getName(), TimeDimensionEnum.MONTH.getChName());
-        result.put(TimeDimensionEnum.WEEK.getName(), TimeDimensionEnum.WEEK.getChName());
+        if (chatQueryContext.containsPartitionDimensions(dataSetId)) {
+            result.put(TimeDimensionEnum.DAY.getChName(), TimeDimensionEnum.DAY.getChName());
+            result.put(TimeDimensionEnum.MONTH.getChName(), TimeDimensionEnum.MONTH.getChName());
+            result.put(TimeDimensionEnum.WEEK.getChName(), TimeDimensionEnum.WEEK.getChName());
+            result.put(TimeDimensionEnum.DAY.getName(), TimeDimensionEnum.DAY.getChName());
+            result.put(TimeDimensionEnum.MONTH.getName(), TimeDimensionEnum.MONTH.getChName());
+            result.put(TimeDimensionEnum.WEEK.getName(), TimeDimensionEnum.WEEK.getChName());
+        }
        return result;
    }

View File

@@ -61,11 +61,6 @@ public class ParserConfig extends ParameterConfig {
"解析结果展示个数", "前端展示的解析个数", "解析结果展示个数", "前端展示的解析个数",
"number", "Parser相关配置"); "number", "Parser相关配置");
public static final Parameter PARSER_S2SQL_ENABLE =
new Parameter("s2.parser.s2sql.switch", "true",
"", "",
"bool", "Parser相关配置");
@Override @Override
public List<Parameter> getSysParameters() { public List<Parameter> getSysParameters() {
return Lists.newArrayList( return Lists.newArrayList(

View File

@@ -86,8 +86,9 @@ public class LLMRequestService {
                && Objects.nonNull(semanticSchema.getDataSetSchemaMap().get(dataSetId))) {
            TimeDefaultConfig timeDefaultConfig = semanticSchema.getDataSetSchemaMap()
                    .get(dataSetId).getTagTypeTimeDefaultConfig();
-            if (!Objects.equals(timeDefaultConfig.getUnit(), -1)) {
-                // 数据集查询设置 时间不为-1时才添加 '数据日期' 字段
+            if (!Objects.equals(timeDefaultConfig.getUnit(), -1)
+                    && queryCtx.containsPartitionDimensions(dataSetId)) {
+                // 数据集配置了数据日期字段,并查询设置 时间不为-1时才添加 '数据日期' 字段
                fieldNameList.add(TimeDimensionEnum.DAY.getChName());
            }
        }

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.chat.parser.rule;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DateConf;
+import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.parser.SemanticParser;
import com.tencent.supersonic.headless.chat.query.QueryManager;
@@ -10,7 +11,6 @@ import com.tencent.supersonic.headless.chat.query.rule.RuleSemanticQuery;
import com.xkzhangsan.time.nlp.TimeNLP;
import com.xkzhangsan.time.nlp.TimeNLPUtil;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;

import java.text.DateFormat;
import java.text.ParseException;
@@ -22,8 +22,6 @@ import java.util.Stack;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-;

/**
 * TimeRangeParser extracts time range specified in the user query
 * based on keyword matching.
@@ -52,123 +50,127 @@ public class TimeRangeParser implements SemanticParser {
        }
        if (dateConf != null) {
-            if (queryContext.getCandidateQueries().size() > 0) {
-                for (SemanticQuery query : queryContext.getCandidateQueries()) {
-                    query.getParseInfo().setDateInfo(dateConf);
-                    query.getParseInfo().setScore(query.getParseInfo().getScore()
-                            + dateConf.getDetectWord().length());
-                }
-            } else if (QueryManager.containsRuleQuery(queryContext.getContextParseInfo().getQueryMode())) {
-                RuleSemanticQuery semanticQuery = QueryManager.createRuleQuery(
-                        queryContext.getContextParseInfo().getQueryMode());
-                // inherit parse info from context
-                queryContext.getContextParseInfo().setDateInfo(dateConf);
-                queryContext.getContextParseInfo().setScore(queryContext.getContextParseInfo().getScore()
-                        + dateConf.getDetectWord().length());
-                semanticQuery.setParseInfo(queryContext.getContextParseInfo());
-                queryContext.getCandidateQueries().add(semanticQuery);
-            }
+            updateQueryContext(queryContext, dateConf);
        }
    }

+    private void updateQueryContext(ChatQueryContext queryContext, DateConf dateConf) {
+        if (!queryContext.getCandidateQueries().isEmpty()) {
+            for (SemanticQuery query : queryContext.getCandidateQueries()) {
+                query.getParseInfo().setDateInfo(dateConf);
+                query.getParseInfo().setScore(query.getParseInfo().getScore() + dateConf.getDetectWord().length());
+            }
+        } else {
+            SemanticParseInfo contextParseInfo = queryContext.getContextParseInfo();
+            if (QueryManager.containsRuleQuery(contextParseInfo.getQueryMode())) {
+                RuleSemanticQuery semanticQuery = QueryManager.createRuleQuery(contextParseInfo.getQueryMode());
+                contextParseInfo.setDateInfo(dateConf);
+                contextParseInfo.setScore(contextParseInfo.getScore() + dateConf.getDetectWord().length());
+                semanticQuery.setParseInfo(contextParseInfo);
+                queryContext.getCandidateQueries().add(semanticQuery);
+            }
+        }
+    }
    private DateConf parseDateCN(String queryText) {
-        Date startDate = null;
-        Date endDate;
-        String detectWord = null;
        List<TimeNLP> times = TimeNLPUtil.parse(queryText);
-        if (times.size() > 0) {
-            startDate = times.get(0).getTime();
-            detectWord = times.get(0).getTimeExpression();
-        } else {
+        if (times.isEmpty()) {
            return null;
        }
+        Date startDate = times.get(0).getTime();
+        String detectWord = times.get(0).getTimeExpression();
+        Date endDate = times.size() > 1 ? times.get(1).getTime() : startDate;
        if (times.size() > 1) {
-            endDate = times.get(1).getTime();
-            detectWord += "~" + times.get(0).getTimeExpression();
-        } else {
-            endDate = startDate;
+            detectWord += "~" + times.get(1).getTimeExpression();
        }
        return getDateConf(startDate, endDate, detectWord);
    }
    private DateConf parseDateNumber(String queryText) {
-        String startDate;
-        String endDate = null;
-        String detectWord = null;
        Matcher dateMatcher = DATE_PATTERN_NUMBER.matcher(queryText);
-        if (dateMatcher.find()) {
-            startDate = dateMatcher.group();
-            detectWord = startDate;
-        } else {
+        if (!dateMatcher.find()) {
            return null;
        }
-        if (dateMatcher.find()) {
-            endDate = dateMatcher.group();
-            detectWord += "~" + endDate;
+        String startDateStr = dateMatcher.group();
+        String detectWord = startDateStr;
+        String endDateStr = dateMatcher.find() ? dateMatcher.group() : startDateStr;
+        if (!startDateStr.equals(endDateStr)) {
+            detectWord += "~" + endDateStr;
        }
-        endDate = endDate != null ? endDate : startDate;
        try {
-            return getDateConf(DATE_FORMAT_NUMBER.parse(startDate), DATE_FORMAT_NUMBER.parse(endDate), detectWord);
+            Date startDate = DATE_FORMAT_NUMBER.parse(startDateStr);
+            Date endDate = DATE_FORMAT_NUMBER.parse(endDateStr);
+            return getDateConf(startDate, endDate, detectWord);
        } catch (ParseException e) {
            return null;
        }
    }
    private DateConf parseRecent(String queryText) {
-        Matcher m = RECENT_PATTERN_CN.matcher(queryText);
-        if (m.matches()) {
-            int num = 0;
-            String enNum = m.group("enNum");
-            String zhNum = m.group("zhNum");
-            if (enNum != null) {
-                num = Integer.parseInt(enNum);
-            } else if (zhNum != null) {
-                num = zhNumParse(zhNum);
-            }
-            if (num > 0) {
-                DateConf info = new DateConf();
-                String zhPeriod = m.group("zhPeriod");
-                int days;
-                switch (zhPeriod) {
-                    case "周":
-                        days = 7;
-                        info.setPeriod(Constants.WEEK);
-                        break;
-                    case "月":
-                        days = 30;
-                        info.setPeriod(Constants.MONTH);
-                        break;
-                    case "年":
-                        days = 365;
-                        info.setPeriod(Constants.YEAR);
-                        break;
-                    default:
-                        days = 1;
-                        info.setPeriod(Constants.DAY);
-                }
-                days = days * num;
-                info.setDateMode(DateConf.DateMode.RECENT);
-                String detectWord = "近" + num + zhPeriod;
-                if (StringUtils.isNotEmpty(m.group("periodStr"))) {
-                    detectWord = m.group("periodStr");
-                }
-                info.setDetectWord(detectWord);
-                info.setStartDate(LocalDate.now().minusDays(days).toString());
-                info.setEndDate(LocalDate.now().minusDays(1).toString());
-                info.setUnit(num);
-                return info;
-            }
-        }
-        return null;
+        Matcher matcher = RECENT_PATTERN_CN.matcher(queryText);
+        if (!matcher.matches()) {
+            return null;
+        }
+        int num = parseNumber(matcher);
+        if (num <= 0) {
+            return null;
+        }
+        String zhPeriod = matcher.group("zhPeriod");
+        int days = getDaysByPeriod(zhPeriod) * num;
+        String detectWord = matcher.group("periodStr");
+        DateConf info = new DateConf();
+        info.setPeriod(getPeriodConstant(zhPeriod));
+        info.setDateMode(DateConf.DateMode.RECENT);
+        info.setDetectWord(detectWord);
+        info.setStartDate(LocalDate.now().minusDays(days).toString());
+        info.setEndDate(LocalDate.now().minusDays(1).toString());
+        info.setUnit(num);
+        return info;
+    }
+
+    private int parseNumber(Matcher matcher) {
+        String enNum = matcher.group("enNum");
+        String zhNum = matcher.group("zhNum");
+        if (enNum != null) {
+            return Integer.parseInt(enNum);
+        } else if (zhNum != null) {
+            return zhNumParse(zhNum);
+        }
+        return 0;
+    }
+
+    private int getDaysByPeriod(String zhPeriod) {
+        switch (zhPeriod) {
+            case "周":
+                return 7;
+            case "月":
+                return 30;
+            case "年":
+                return 365;
+            default:
+                return 1;
+        }
+    }
+
+    private String getPeriodConstant(String zhPeriod) {
+        switch (zhPeriod) {
+            case "周":
+                return Constants.WEEK;
+            case "月":
+                return Constants.MONTH;
+            case "年":
+                return Constants.YEAR;
+            default:
+                return Constants.DAY;
+        }
    }

    private int zhNumParse(String zhNumStr) {
@@ -176,10 +178,9 @@ public class TimeRangeParser implements SemanticParser {
        String numStr = "一二三四五六七八九";
        String unitStr = "十百千万亿";
-        String[] ssArr = zhNumStr.split("");
-        for (String e : ssArr) {
-            int numIndex = numStr.indexOf(e);
-            int unitIndex = unitStr.indexOf(e);
+        for (char c : zhNumStr.toCharArray()) {
+            int numIndex = numStr.indexOf(c);
+            int unitIndex = unitStr.indexOf(c);
            if (numIndex != -1) {
                stack.push(numIndex + 1);
            } else if (unitIndex != -1) {
@@ -192,7 +193,7 @@ public class TimeRangeParser implements SemanticParser {
            }
        }
-        return stack.stream().mapToInt(s -> s).sum();
+        return stack.stream().mapToInt(Integer::intValue).sum();
    }

    private DateConf getDateConf(Date startDate, Date endDate, String detectWord) {
@@ -207,5 +208,4 @@ public class TimeRangeParser implements SemanticParser {
        info.setDetectWord(detectWord);
        return info;
    }
}
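The refactored parseRecent keeps the original semantics: the numeral (Arabic or Chinese) is multiplied by the period length and mapped onto a RECENT window that ends yesterday. A small sketch of the date arithmetic, assuming an input of "近两周" (unit 2, period 周):

    int unit = 2;                       // from parseNumber(matcher)
    int days = 7 * unit;                // getDaysByPeriod("周") -> 7
    String startDate = LocalDate.now().minusDays(days).toString();
    String endDate = LocalDate.now().minusDays(1).toString();   // window ends yesterday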

View File

@@ -5,12 +5,11 @@ import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.Order;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
-import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
-import com.tencent.supersonic.headless.chat.parser.ParserConfig;
+import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
@@ -21,8 +20,6 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import static com.tencent.supersonic.headless.chat.parser.ParserConfig.PARSER_S2SQL_ENABLE;

@Slf4j
@ToString
public abstract class BaseSemanticQuery implements SemanticQuery, Serializable {
@@ -43,6 +40,19 @@ public abstract class BaseSemanticQuery implements SemanticQuery, Serializable {
        return QueryReqBuilder.buildStructReq(parseInfo);
    }

+    @Override
+    public SemanticQueryReq buildSemanticQueryReq() {
+        return QueryReqBuilder.buildS2SQLReq(parseInfo.getSqlInfo(), parseInfo.getDataSetId());
+    }
+
+    protected void initS2SqlByStruct(DataSetSchema dataSetSchema) {
+        QueryStructReq queryStructReq = convertQueryStruct();
+        convertBizNameToName(dataSetSchema, queryStructReq);
+        QuerySqlReq querySQLReq = queryStructReq.convert();
+        parseInfo.getSqlInfo().setParsedS2SQL(querySQLReq.getSql());
+        parseInfo.getSqlInfo().setCorrectedS2SQL(querySQLReq.getSql());
+    }
+
    protected void convertBizNameToName(DataSetSchema dataSetSchema, QueryStructReq queryStructReq) {
        Map<String, String> bizNameToName = dataSetSchema.getBizNameToName();
        bizNameToName.putAll(TimeDimensionEnum.getNameToNameMap());
@@ -74,17 +84,4 @@ public abstract class BaseSemanticQuery implements SemanticQuery, Serializable {
        }
    }

-    protected void initS2SqlByStruct(DataSetSchema dataSetSchema) {
-        ParserConfig parserConfig = ContextUtils.getBean(ParserConfig.class);
-        boolean s2sqlEnable = Boolean.valueOf(parserConfig.getParameterValue(PARSER_S2SQL_ENABLE));
-        if (!s2sqlEnable) {
-            return;
-        }
-        QueryStructReq queryStructReq = convertQueryStruct();
-        convertBizNameToName(dataSetSchema, queryStructReq);
-        QuerySqlReq querySQLReq = queryStructReq.convert();
-        parseInfo.getSqlInfo().setParsedS2SQL(querySQLReq.getSql());
-        parseInfo.getSqlInfo().setCorrectedS2SQL(querySQLReq.getSql());
-    }
}

View File

@@ -3,10 +3,8 @@ package com.tencent.supersonic.headless.chat.query.llm.s2sql;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.SqlInfo;
-import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
import com.tencent.supersonic.headless.chat.query.QueryManager;
import com.tencent.supersonic.headless.chat.query.llm.LLMSemanticQuery;
-import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -25,11 +23,6 @@ public class LLMSqlQuery extends LLMSemanticQuery {
        return QUERY_MODE;
    }

-    @Override
-    public SemanticQueryReq buildSemanticQueryReq() {
-        return QueryReqBuilder.buildS2SQLReq(parseInfo.getSqlInfo(), parseInfo.getDataSetId());
-    }
-
    @Override
    public void initS2Sql(DataSetSchema dataSetSchema, User user) {
        SqlInfo sqlInfo = parseInfo.getSqlInfo();

View File

@@ -27,6 +27,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -41,7 +42,7 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
    }

    public List<SchemaElementMatch> match(List<SchemaElementMatch> candidateElementMatches,
            ChatQueryContext queryCtx) {
        return queryMatcher.match(candidateElementMatches);
    }
@@ -56,20 +57,30 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
        fillSchemaElement(parseInfo, semanticSchema);
        fillScore(parseInfo);
-        fillDateConf(parseInfo, chatQueryContext.getContextParseInfo());
+        fillDateConfByInherited(parseInfo, chatQueryContext);
    }

-    private void fillDateConf(SemanticParseInfo queryParseInfo, SemanticParseInfo chatParseInfo) {
-        if (queryParseInfo.getDateInfo() != null || chatParseInfo.getDateInfo() == null) {
+    public boolean needFillDateConf(ChatQueryContext chatQueryContext) {
+        Long dataSetId = parseInfo.getDataSetId();
+        if (Objects.isNull(dataSetId) || dataSetId <= 0L) {
+            return false;
+        }
+        return chatQueryContext.containsPartitionDimensions(dataSetId);
+    }
+
+    private void fillDateConfByInherited(SemanticParseInfo queryParseInfo, ChatQueryContext chatQueryContext) {
+        SemanticParseInfo contextParseInfo = chatQueryContext.getContextParseInfo();
+        if (queryParseInfo.getDateInfo() != null || contextParseInfo.getDateInfo() == null
+                || needFillDateConf(chatQueryContext)) {
            return;
        }
        if ((QueryManager.isTagQuery(queryParseInfo.getQueryMode())
-                && QueryManager.isTagQuery(chatParseInfo.getQueryMode()))
+                && QueryManager.isTagQuery(contextParseInfo.getQueryMode()))
                || (QueryManager.isMetricQuery(queryParseInfo.getQueryMode())
-                && QueryManager.isMetricQuery(chatParseInfo.getQueryMode()))) {
+                && QueryManager.isMetricQuery(contextParseInfo.getQueryMode()))) {
            // inherit date info from context
-            queryParseInfo.setDateInfo(chatParseInfo.getDateInfo());
+            queryParseInfo.setDateInfo(contextParseInfo.getDateInfo());
            queryParseInfo.getDateInfo().setInherited(true);
        }
    }
@@ -142,13 +153,15 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
    }

    private void addToFilters(Map<Long, List<SchemaElementMatch>> id2Values, SemanticParseInfo parseInfo,
            SemanticSchema semanticSchema, SchemaElementType entity) {
        if (id2Values == null || id2Values.isEmpty()) {
            return;
        }
        for (Entry<Long, List<SchemaElementMatch>> entry : id2Values.entrySet()) {
            SchemaElement dimension = semanticSchema.getElement(entity, entry.getKey());
+            if (dimension.containsPartitionTime()) {
+                continue;
+            }
            if (entry.getValue().size() == 1) {
                SchemaElementMatch schemaMatch = entry.getValue().get(0);
                QueryFilter dimensionFilter = new QueryFilter();
@@ -173,34 +186,6 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
        }
    }

-    private void addToValues(SemanticSchema semanticSchema, SchemaElementType entity,
-            Map<Long, List<SchemaElementMatch>> id2Values, SchemaElementMatch schemaMatch) {
-        SchemaElement element = schemaMatch.getElement();
-        SchemaElement entityElement = semanticSchema.getElement(entity, element.getId());
-        if (entityElement != null) {
-            if (id2Values.containsKey(element.getId())) {
-                id2Values.get(element.getId()).add(schemaMatch);
-            } else {
-                id2Values.put(element.getId(), new ArrayList<>(Arrays.asList(schemaMatch)));
-            }
-        }
-    }
-
-    @Override
-    public SemanticQueryReq buildSemanticQueryReq() {
-        String queryMode = parseInfo.getQueryMode();
-        if (parseInfo.getDataSetId() == null || StringUtils.isEmpty(queryMode)
-                || !QueryManager.containsRuleQuery(queryMode)) {
-            // reach here some error may happen
-            log.error("not find QueryMode");
-            throw new RuntimeException("not find QueryMode");
-        }
-        QueryStructReq queryStructReq = convertQueryStruct();
-        return queryStructReq.convert(true);
-    }
-
    protected boolean isMultiStructQuery() {
        return false;
    }
@@ -224,7 +209,7 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
    }

    public static List<RuleSemanticQuery> resolve(Long dataSetId, List<SchemaElementMatch> candidateElementMatches,
            ChatQueryContext chatQueryContext) {
        List<RuleSemanticQuery> matchedQueries = new ArrayList<>();
        for (RuleSemanticQuery semanticQuery : QueryManager.getRuleQueries()) {
            List<SchemaElementMatch> matches = semanticQuery.match(candidateElementMatches, chatQueryContext);

View File

@@ -25,34 +25,35 @@ public abstract class DetailListQuery extends DetailSemanticQuery {
    private void addEntityDetailAndOrderByMetric(ChatQueryContext chatQueryContext, SemanticParseInfo parseInfo) {
        Long dataSetId = parseInfo.getDataSetId();
-        if (Objects.nonNull(dataSetId) && dataSetId > 0L) {
-            DataSetSchema dataSetSchema = chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(dataSetId);
-            if (dataSetSchema != null && Objects.nonNull(dataSetSchema.getEntity())) {
+        if (Objects.isNull(dataSetId) || dataSetId <= 0L) {
+            return;
+        }
+        DataSetSchema dataSetSchema = chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(dataSetId);
+        if (dataSetSchema != null && Objects.nonNull(dataSetSchema.getEntity())) {
            Set<SchemaElement> dimensions = new LinkedHashSet<>();
            Set<SchemaElement> metrics = new LinkedHashSet<>();
            Set<Order> orders = new LinkedHashSet<>();
            TagTypeDefaultConfig tagTypeDefaultConfig = dataSetSchema.getTagTypeDefaultConfig();
            if (tagTypeDefaultConfig != null && tagTypeDefaultConfig.getDefaultDisplayInfo() != null) {
                if (CollectionUtils.isNotEmpty(tagTypeDefaultConfig.getDefaultDisplayInfo().getMetricIds())) {
                    metrics = tagTypeDefaultConfig.getDefaultDisplayInfo().getMetricIds()
                            .stream().map(id -> {
                                SchemaElement metric = dataSetSchema.getElement(SchemaElementType.METRIC, id);
                                if (metric != null) {
                                    orders.add(new Order(metric.getBizName(), Constants.DESC_UPPER));
                                }
                                return metric;
                            }).filter(Objects::nonNull).collect(Collectors.toSet());
                }
                if (CollectionUtils.isNotEmpty(tagTypeDefaultConfig.getDefaultDisplayInfo().getDimensionIds())) {
                    dimensions = tagTypeDefaultConfig.getDefaultDisplayInfo().getDimensionIds().stream()
                            .map(id -> dataSetSchema.getElement(SchemaElementType.DIMENSION, id))
                            .filter(Objects::nonNull).collect(Collectors.toSet());
                }
            }
            parseInfo.setDimensions(dimensions);
            parseInfo.setMetrics(metrics);
            parseInfo.setOrders(orders);
        }
-        }
    }

View File

@@ -14,6 +14,7 @@ import lombok.extern.slf4j.Slf4j;
import java.time.LocalDate;
import java.util.List;
+import java.util.Map;
import java.util.Objects;

@Slf4j
@@ -39,28 +40,31 @@ public abstract class DetailSemanticQuery extends RuleSemanticQuery {
        parseInfo.setQueryType(QueryType.DETAIL);
        parseInfo.setLimit(DETAIL_MAX_RESULTS);
-        if (parseInfo.getDateInfo() == null) {
-            DataSetSchema dataSetSchema =
-                    chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(parseInfo.getDataSetId());
-            TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
-            DateConf dateInfo = new DateConf();
-            if (Objects.nonNull(timeDefaultConfig) && Objects.nonNull(timeDefaultConfig.getUnit())
-                    && timeDefaultConfig.getUnit() != -1) {
+        if (!needFillDateConf(chatQueryContext)) {
+            return;
+        }
+        Map<Long, DataSetSchema> dataSetSchemaMap = chatQueryContext.getSemanticSchema().getDataSetSchemaMap();
+        DataSetSchema dataSetSchema = dataSetSchemaMap.get(parseInfo.getDataSetId());
+        TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
+        if (Objects.nonNull(timeDefaultConfig)
+                && Objects.nonNull(timeDefaultConfig.getUnit())
+                && timeDefaultConfig.getUnit() != -1) {
+            DateConf dateInfo = new DateConf();
            int unit = timeDefaultConfig.getUnit();
            String startDate = LocalDate.now().plusDays(-unit).toString();
            String endDate = startDate;
            if (TimeMode.LAST.equals(timeDefaultConfig.getTimeMode())) {
                dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
            } else if (TimeMode.RECENT.equals(timeDefaultConfig.getTimeMode())) {
                dateInfo.setDateMode(DateConf.DateMode.RECENT);
                endDate = LocalDate.now().plusDays(-1).toString();
            }
            dateInfo.setUnit(unit);
            dateInfo.setPeriod(timeDefaultConfig.getPeriod());
            dateInfo.setStartDate(startDate);
            dateInfo.setEndDate(endDate);
            parseInfo.setDateInfo(dateInfo);
        }
-        }
    }

View File

@@ -1,24 +1,24 @@
package com.tencent.supersonic.headless.chat.query.rule.metric;

-import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ENTITY;
-import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ID;
-import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.OptionType.REQUIRED;
-import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.RequireNumberType.AT_LEAST;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.FilterType;
import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.stereotype.Component;
+import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ENTITY;
+import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ID;
+import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.OptionType.REQUIRED;
+import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.RequireNumberType.AT_LEAST;

@Slf4j
@Component

View File

@@ -38,30 +38,35 @@ public abstract class MetricSemanticQuery extends RuleSemanticQuery {
    public void fillParseInfo(ChatQueryContext chatQueryContext) {
        super.fillParseInfo(chatQueryContext);
        parseInfo.setLimit(METRIC_MAX_RESULTS);
-        if (parseInfo.getDateInfo() == null) {
-            DataSetSchema dataSetSchema =
-                    chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(parseInfo.getDataSetId());
-            TimeDefaultConfig timeDefaultConfig = dataSetSchema.getMetricTypeTimeDefaultConfig();
+        fillDateInfo(chatQueryContext);
+    }
+
+    private void fillDateInfo(ChatQueryContext chatQueryContext) {
+        if (parseInfo.getDateInfo() != null || !needFillDateConf(chatQueryContext)) {
+            return;
+        }
+        DataSetSchema dataSetSchema =
+                chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(parseInfo.getDataSetId());
+        TimeDefaultConfig timeDefaultConfig = dataSetSchema.getMetricTypeTimeDefaultConfig();
        DateConf dateInfo = new DateConf();
        //加上时间!=-1 判断
        if (Objects.nonNull(timeDefaultConfig) && Objects.nonNull(timeDefaultConfig.getUnit())
                && timeDefaultConfig.getUnit() != -1) {
            int unit = timeDefaultConfig.getUnit();
            String startDate = LocalDate.now().plusDays(-unit).toString();
            String endDate = startDate;
            if (TimeMode.LAST.equals(timeDefaultConfig.getTimeMode())) {
                dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
            } else if (TimeMode.RECENT.equals(timeDefaultConfig.getTimeMode())) {
                dateInfo.setDateMode(DateConf.DateMode.RECENT);
                endDate = LocalDate.now().plusDays(-1).toString();
            }
            dateInfo.setUnit(unit);
            dateInfo.setPeriod(timeDefaultConfig.getPeriod());
            dateInfo.setStartDate(startDate);
            dateInfo.setEndDate(endDate);
            // 时间不为-1才设置时间所以移到这里
            parseInfo.setDateInfo(dateInfo);
        }
-        }
    }
}
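fillDateInfo only attaches a default window when the data set has a partition dimension and its configured unit is not -1. A hedged sketch of the config it reads; the setter names are assumed (lombok-style accessors matching the getters used above):

    TimeDefaultConfig cfg = new TimeDefaultConfig();
    cfg.setUnit(7);                    // -1 would mean: never attach a default date window
    cfg.setPeriod(Constants.DAY);
    cfg.setTimeMode(TimeMode.RECENT);  // RECENT -> [today-7, yesterday]; LAST -> the single day today-7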

View File

@@ -18,6 +18,11 @@ import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.chat.query.QueryManager;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.BeanUtils;
+import org.springframework.util.CollectionUtils;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
@@ -28,10 +33,6 @@ import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
-import org.springframework.beans.BeanUtils;
-import org.springframework.util.CollectionUtils;

@Slf4j
public class QueryReqBuilder {
@@ -88,9 +89,12 @@ public class QueryReqBuilder {
    }

    private static DateConf rewrite2Between(DateConf dateInfo) {
+        if (Objects.isNull(dateInfo)) {
+            return null;
+        }
        DateConf dateInfoNew = new DateConf();
        BeanUtils.copyProperties(dateInfo, dateInfoNew);
-        if (Objects.nonNull(dateInfo) && DateConf.DateMode.RECENT.equals(dateInfo.getDateMode())) {
+        if (DateConf.DateMode.RECENT.equals(dateInfo.getDateMode())) {
            int unit = dateInfo.getUnit();
            int days = 1;
            switch (dateInfo.getPeriod()) {
@@ -222,7 +226,7 @@ public class QueryReqBuilder {
    }

    public static Set<Order> getOrder(Set<Order> existingOrders,
            AggregateTypeEnum aggregator, SchemaElement metric) {
        if (existingOrders != null && !existingOrders.isEmpty()) {
            return existingOrders;
        }
@@ -259,7 +263,7 @@ public class QueryReqBuilder {
    }

    public static QueryStructReq buildStructRatioReq(SemanticParseInfo parseInfo, SchemaElement metric,
            AggOperatorEnum aggOperatorEnum) {
        QueryStructReq queryStructReq = buildStructReq(parseInfo);
        queryStructReq.setQueryType(QueryType.METRIC);
        queryStructReq.setOrders(new ArrayList<>());

View File

@@ -188,7 +188,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
            return queryResp;
        } catch (Exception e) {
-            log.error("exception in queryByStruct, e: ", e);
+            log.error("exception in queryByReq:{}, e: ", queryReq, e);
            state = TaskStatusEnum.ERROR;
            throw e;
        } finally {
@@ -205,8 +205,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
        List<String> dimensionValues = getDimensionValuesFromDict(dimensionValueReq, dataSetIds);
        // if the search results is null,search dimensionValue from database
        if (CollectionUtils.isEmpty(dimensionValues)) {
-            semanticQueryResp = getDimensionValuesFromDb(dimensionValueReq, user);
-            return semanticQueryResp;
+            return getDimensionValuesFromDb(dimensionValueReq, user);
        }
        List<QueryColumn> columns = new ArrayList<>();
        QueryColumn queryColumn = new QueryColumn();
@@ -501,20 +500,23 @@ public class S2SemanticLayerService implements SemanticLayerService {
        semanticParseInfo.setQueryType(QueryType.DETAIL);
        semanticParseInfo.setMetrics(getMetrics(entityInfo));
        semanticParseInfo.setDimensions(getDimensions(entityInfo));
-        DateConf dateInfo = new DateConf();
-        int unit = 1;
-        TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
-        if (Objects.nonNull(timeDefaultConfig)) {
-            unit = timeDefaultConfig.getUnit();
-            String date = LocalDate.now().plusDays(-unit).toString();
-            dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
-            dateInfo.setStartDate(date);
-            dateInfo.setEndDate(date);
-        } else {
-            dateInfo.setUnit(unit);
-            dateInfo.setDateMode(DateConf.DateMode.RECENT);
-        }
-        semanticParseInfo.setDateInfo(dateInfo);
+        if (dataSetSchema.containsPartitionDimensions()) {
+            DateConf dateInfo = new DateConf();
+            int unit = 1;
+            TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
+            if (Objects.nonNull(timeDefaultConfig)) {
+                unit = timeDefaultConfig.getUnit();
+                String date = LocalDate.now().plusDays(-unit).toString();
+                dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
+                dateInfo.setStartDate(date);
+                dateInfo.setEndDate(date);
+            } else {
+                dateInfo.setUnit(unit);
+                dateInfo.setDateMode(DateConf.DateMode.RECENT);
+            }
+            semanticParseInfo.setDateInfo(dateInfo);
+        }

        //add filter
        QueryFilter chatFilter = getQueryFilter(entityInfo);
@@ -524,8 +526,8 @@ public class S2SemanticLayerService implements SemanticLayerService {
        SemanticQueryResp queryResultWithColumns = null;
        try {
-            QueryStructReq queryStructReq = QueryReqBuilder.buildStructReq(semanticParseInfo);
-            queryResultWithColumns = queryByReq(queryStructReq, user);
+            QuerySqlReq querySqlReq = QueryReqBuilder.buildStructReq(semanticParseInfo).convert();
+            queryResultWithColumns = queryByReq(querySqlReq, user);
        } catch (Exception e) {
            log.warn("setMainModel queryByStruct error, e:", e);
        }

View File

@@ -116,7 +116,7 @@ public class ParseInfoProcessor implements ResultProcessor {
            QueryFilter dimensionFilter = new QueryFilter();
            dimensionFilter.setValue(expression.getFieldValue());
            SchemaElement schemaElement = fieldNameToElement.get(expression.getFieldName());
-            if (Objects.isNull(schemaElement)) {
+            if (Objects.isNull(schemaElement) || schemaElement.containsPartitionTime()) {
                continue;
            }
            dimensionFilter.setName(schemaElement.getName());
@@ -167,7 +167,7 @@ public class ParseInfoProcessor implements ResultProcessor {
    }

    private boolean containOperators(FieldExpression expression, FilterOperatorEnum firstOperator,
            FilterOperatorEnum... operatorEnums) {
        return (Arrays.asList(operatorEnums).contains(firstOperator) && Objects.nonNull(
                expression.getFieldValue()));
    }

View File

@@ -13,7 +13,6 @@ import com.tencent.supersonic.headless.chat.mapper.SchemaMapper;
import com.tencent.supersonic.headless.chat.parser.SemanticParser;
import com.tencent.supersonic.headless.chat.query.QueryManager;
import com.tencent.supersonic.headless.chat.query.SemanticQuery;
-import com.tencent.supersonic.headless.chat.query.rule.RuleSemanticQuery;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
import com.tencent.supersonic.headless.server.processor.ResultProcessor;
import lombok.extern.slf4j.Slf4j;
@@ -108,9 +107,6 @@ public class ChatWorkflowEngine {
        List<SemanticQuery> candidateQueries = queryCtx.getCandidateQueries();
        if (CollectionUtils.isNotEmpty(candidateQueries)) {
            for (SemanticQuery semanticQuery : candidateQueries) {
-                if (semanticQuery instanceof RuleSemanticQuery) {
-                    continue;
-                }
                for (SemanticCorrector corrector : semanticCorrectors) {
                    corrector.correct(queryCtx, semanticQuery.getParseInfo());
                    if (!ChatWorkflowState.CORRECTING.equals(queryCtx.getChatWorkflowState())) {

View File

@@ -383,7 +383,6 @@ public class DictUtils {
            fillStructDateBetween(queryStructReq, model, config.getDateConf().getUnit() - 1, 0);
            return;
        }
-        return;
    }

    private void fillStructDateBetween(QueryStructReq queryStructReq, ModelResp model,

View File

@@ -15,7 +15,6 @@ import com.tencent.supersonic.headless.api.pojo.DataSetDetail;
import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig;
import com.tencent.supersonic.headless.api.pojo.DefaultDisplayInfo;
import com.tencent.supersonic.headless.api.pojo.Dim;
-import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MetricTypeDefaultConfig;
@@ -119,9 +118,6 @@ public class S2ArtistDemo extends S2BaseDemo {
        modelDetail.setIdentifiers(identifiers);
        List<Dim> dimensions = new ArrayList<>();
-        Dim dimension1 = new Dim("", "imp_date", DimensionType.time.name(), 0);
-        dimension1.setTypeParams(new DimensionTimeTypeParams());
-        dimensions.add(dimension1);
        dimensions.add(new Dim("活跃区域", "act_area",
                DimensionType.categorical.name(), 1, 1));
        dimensions.add(new Dim("代表作", "song_name",
@@ -135,7 +131,7 @@ public class S2ArtistDemo extends S2BaseDemo {
        Measure measure3 = new Measure("收藏量", "favor_cnt", "sum", 1);
        modelDetail.setMeasures(Lists.newArrayList(measure1, measure2, measure3));
        modelDetail.setQueryType("sql_query");
-        modelDetail.setSqlQuery("select imp_date, singer_name, act_area, song_name, genre, "
+        modelDetail.setSqlQuery("select singer_name, act_area, song_name, genre, "
                + "js_play_cnt, down_cnt, favor_cnt from singer");
        modelReq.setModelDetail(modelDetail);
        return modelService.createModel(modelReq, user);

View File

@@ -365,4 +365,5 @@ alter table s2_chat_memory add `side_info` TEXT DEFAULT NULL COMMENT '辅助信
alter table s2_chat_parse modify column `chat_id` int(11); alter table s2_chat_parse modify column `chat_id` int(11);
--20240806 --20240806
UPDATE `s2_dimension` SET `type` = 'identify' WHERE `type` in ('primary','foreign'); UPDATE `s2_dimension` SET `type` = 'identify' WHERE `type` in ('primary','foreign');
alter table singer drop column imp_date;
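The added statement drops the demo table's partition-date column so that upgraded installations converge on the same timeless schema as a fresh install. A minimal JDBC sketch of applying that statement; the in-memory H2 URL is an assumption made purely so the snippet can run standalone (the changelog itself targets whichever database is configured, e.g. the int(11) syntax above is MySQL):

// DropImpDateMigrationSketch.java -- assumes the H2 driver is on the classpath.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DropImpDateMigrationSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement stmt = conn.createStatement()) {
            // Old shape of the demo table, still carrying the partition-date column.
            stmt.execute("CREATE TABLE singer (imp_date VARCHAR(200), singer_name VARCHAR(200) NOT NULL)");
            // The migration statement from the changelog above.
            stmt.execute("ALTER TABLE singer DROP COLUMN imp_date");
            try (ResultSet rs = stmt.executeQuery(
                    "SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'SINGER'")) {
                rs.next();
                System.out.println("columns left: " + rs.getInt(1)); // 1
            }
        }
    }
}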

View File

@@ -17,53 +17,12 @@ MERGE INTO s2_canvas(`id`, `domain_id`, `type`, `config` ,`created_at` ,`create
values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin'); values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin');
-- sample data -- sample data
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
---demo data for semantic and chat ---demo data for semantic and chat
MERGE INTO s2_user_department (user_name, department) values ('jack','HR'); MERGE INTO s2_user_department (user_name, department) values ('jack','HR');

View File

@@ -20,136 +20,23 @@ insert into s2_canvas(`id`, `domain_id`, `type`, `config` ,`created_at` ,`creat
values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin'); values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin');
-- sample data -- sample data
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt) INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000); VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
-- demo data for semantic and chat -- demo data for semantic and chat
insert into s2_user_department (user_name, department) values ('jack','HR'); insert into s2_user_department (user_name, department) values ('jack','HR');

View File

@@ -413,7 +413,6 @@ CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info'; COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info';
CREATE TABLE IF NOT EXISTS `singer` ( CREATE TABLE IF NOT EXISTS `singer` (
`imp_date` varchar(200) NOT NULL,
`singer_name` varchar(200) NOT NULL, `singer_name` varchar(200) NOT NULL,
`act_area` varchar(200) NOT NULL, `act_area` varchar(200) NOT NULL,
`song_name` varchar(200) NOT NULL, `song_name` varchar(200) NOT NULL,
@@ -421,7 +420,7 @@ CREATE TABLE IF NOT EXISTS `singer` (
`js_play_cnt` bigINT DEFAULT NULL, `js_play_cnt` bigINT DEFAULT NULL,
`down_cnt` bigINT DEFAULT NULL, `down_cnt` bigINT DEFAULT NULL,
`favor_cnt` bigINT DEFAULT NULL, `favor_cnt` bigINT DEFAULT NULL,
PRIMARY KEY (`imp_date`, `singer_name`) PRIMARY KEY (`singer_name`)
); );
COMMENT ON TABLE singer IS 'singer_info'; COMMENT ON TABLE singer IS 'singer_info';
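With imp_date removed, singer_name alone identifies a row, which is why the seed data above collapses from seven dated rows per singer to a single row each. A minimal JDBC sketch of the timeless table plus one MERGE, mirroring the seed statements; the in-memory H2 URL is again only an assumption so the snippet runs standalone (H2's MERGE keys on the primary key when no KEY clause is given, which keeps the seed idempotent):

// TimelessSingerSeedSketch.java -- assumes the H2 driver is on the classpath.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class TimelessSingerSeedSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE singer ("
                    + " singer_name VARCHAR(200) NOT NULL,"
                    + " act_area VARCHAR(200) NOT NULL,"
                    + " song_name VARCHAR(200) NOT NULL,"
                    + " genre VARCHAR(200),"
                    + " js_play_cnt BIGINT, down_cnt BIGINT, favor_cnt BIGINT,"
                    + " PRIMARY KEY (singer_name))");
            // Re-running the same MERGE updates the existing row instead of adding a dated duplicate.
            String merge = "MERGE INTO singer (singer_name, act_area, song_name, genre,"
                    + " js_play_cnt, down_cnt, favor_cnt)"
                    + " VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000)";
            stmt.execute(merge);
            stmt.execute(merge);
            try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM singer")) {
                rs.next();
                System.out.println("rows: " + rs.getInt(1)); // 1
            }
        }
    }
}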

View File

@@ -17,7 +17,6 @@ CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
CREATE TABLE IF NOT EXISTS `singer` ( CREATE TABLE IF NOT EXISTS `singer` (
`imp_date` varchar(200) NOT NULL,
`singer_name` varchar(200) NOT NULL, `singer_name` varchar(200) NOT NULL,
`act_area` varchar(200) NOT NULL, `act_area` varchar(200) NOT NULL,
`song_name` varchar(200) NOT NULL, `song_name` varchar(200) NOT NULL,

View File

@@ -1,13 +1,12 @@
package com.tencent.supersonic.chat; package com.tencent.supersonic.chat;
import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.chat.api.pojo.response.QueryResult;
import com.tencent.supersonic.common.pojo.enums.AggregateTypeEnum; import com.tencent.supersonic.common.pojo.enums.AggregateTypeEnum;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.QueryType; import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.headless.api.pojo.SchemaElement; import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo; import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.request.QueryFilter; import com.tencent.supersonic.headless.api.pojo.request.QueryFilter;
import com.tencent.supersonic.chat.api.pojo.response.QueryResult;
import com.tencent.supersonic.headless.chat.query.rule.detail.DetailFilterQuery; import com.tencent.supersonic.headless.chat.query.rule.detail.DetailFilterQuery;
import com.tencent.supersonic.util.DataUtils; import com.tencent.supersonic.util.DataUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -48,7 +47,6 @@ public class TagTest extends BaseTest {
expectedParseInfo.getDimensions().add(dim3); expectedParseInfo.getDimensions().add(dim3);
expectedParseInfo.getDimensions().add(dim4); expectedParseInfo.getDimensions().add(dim4);
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.BETWEEN, startDay, startDay, 7));
expectedParseInfo.setQueryType(QueryType.DETAIL); expectedParseInfo.setQueryType(QueryType.DETAIL);
assertQueryResult(expectedResult, actualResult); assertQueryResult(expectedResult, actualResult);
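Because the singer data set no longer has a date dimension, the test above stops building a BETWEEN date window into the expected parse info and only checks the detail query type. A minimal standalone sketch of that kind of expectation; ParseInfo below is a hypothetical stand-in, not the project's SemanticParseInfo:

// TimelessParseExpectationSketch.java -- illustration only.
public class TimelessParseExpectationSketch {

    // Hypothetical stand-in for the two fields the expectation now relies on.
    static class ParseInfo {
        Object dateInfo;              // stays null for a timeless data set
        String queryType = "DETAIL";
    }

    public static void main(String[] args) {
        ParseInfo expected = new ParseInfo();
        if (expected.dateInfo != null) {
            throw new AssertionError("a timeless data set should not expect a date filter");
        }
        System.out.println("queryType=" + expected.queryType + ", dateInfo=" + expected.dateInfo);
    }
}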

View File

@@ -1,9 +1,10 @@
-- sample user -- sample user
MERGE INTO s2_user (id, `name`, password, salt, display_name, email, is_admin) values (1, 'admin','c3VwZXJzb25pY0BiaWNvbTD12g9wGXESwL7+o7xUW90=','jGl25bVBBBW96Qi9Te4V3w==','admin','admin@xx.com', 1); ---The default value for the password is 123456
MERGE INTO s2_user (id, `name`, password, display_name, email) values (2, 'jack','123456','jack','jack@xx.com'); MERGE INTO s2_user (id, `name`, password, salt, display_name, email, is_admin) values (1, 'admin','c3VwZXJzb25pY0BiaWNvbdktJJYWw6A3rEmBUPzbn/6DNeYnD+y3mAwDKEMS3KVT','jGl25bVBBBW96Qi9Te4V3w==','admin','admin@xx.com', 1);
MERGE INTO s2_user (id, `name`, password, display_name, email) values (3, 'tom','123456','tom','tom@xx.com'); MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (2, 'jack','c3VwZXJzb25pY0BiaWNvbWxGalmwa0h/trkh/3CWOYMDiku0Op1VmOfESIKmN0HG','MWERWefm/3hD6kYndF6JIg==','jack','jack@xx.com');
MERGE INTO s2_user (id, `name`, password, display_name, email, is_admin) values (4, 'lucy','123456','lucy','lucy@xx.com', 1); MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (3, 'tom','c3VwZXJzb25pY0BiaWNvbVWv0CZ6HzeX8GRUpw0C8NSaQ+0hE/dAcmzRpCFwAqxK','4WCPdcXXgT89QDHLML+3hg==','tom','tom@xx.com');
MERGE INTO s2_user (id, `name`, password, display_name, email) values (5, 'alice','123456','alice','alice@xx.com'); MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (4, 'lucy','c3VwZXJzb25pY0BiaWNvbc7Ychfu99lPL7rLmCkf/vgF4RASa4Z++Mxo1qlDCpci','3Jnpqob6uDoGLP9eCAg5Fw==','lucy','lucy@xx.com');
MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (5, 'alice','c3VwZXJzb25pY0BiaWNvbe9Z4F2/DVIfAJoN1HwUTuH1KgVuiusvfh7KkWYQSNHk','K9gGyX8OAK8aH8Myj6djqQ==','alice','alice@xx.com');
MERGE INTO s2_available_date_info(`id`,`item_id` ,`type` ,`date_format` ,`start_date` ,`end_date` ,`unavailable_date` ,`created_at` ,`created_by` ,`updated_at` ,`updated_by` ) MERGE INTO s2_available_date_info(`id`,`item_id` ,`type` ,`date_format` ,`start_date` ,`end_date` ,`unavailable_date` ,`created_at` ,`created_by` ,`updated_at` ,`updated_by` )
values (1 , 1, 'dimension', 'yyyy-MM-dd', DATEADD('DAY', -28, CURRENT_DATE()), DATEADD('DAY', -1, CURRENT_DATE()), '[]', '2023-06-01', 'admin', '2023-06-01', 'admin'); values (1 , 1, 'dimension', 'yyyy-MM-dd', DATEADD('DAY', -28, CURRENT_DATE()), DATEADD('DAY', -1, CURRENT_DATE()), '[]', '2023-06-01', 'admin', '2023-06-01', 'admin');
@@ -16,53 +17,12 @@ MERGE INTO s2_canvas(`id`, `domain_id`, `type`, `config` ,`created_at` ,`create
values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin'); values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin');
-- sample data -- sample data
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000); MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
---demo data for semantic and chat ---demo data for semantic and chat
MERGE INTO s2_user_department (user_name, department) values ('jack','HR'); MERGE INTO s2_user_department (user_name, department) values ('jack','HR');
@@ -74,7 +34,17 @@ MERGE INTO s2_user_department (user_name, department) values ('john','strategy')
MERGE INTO s2_user_department (user_name, department) values ('alice','sales'); MERGE INTO s2_user_department (user_name, department) values ('alice','sales');
MERGE INTO s2_user_department (user_name, department) values ('dean','marketing'); MERGE INTO s2_user_department (user_name, department) values ('dean','marketing');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'jack', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p4');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p2');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'alice', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p2');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p3');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p4');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'lucy', 'p1'); INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p1'); INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p4'); INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p4');
@@ -1090,12 +1060,12 @@ MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大'); MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国'); MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Shrikanta','印度','男性','tagore'); MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Shrikanta','印度','男性','tagore');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Prity','孟加拉国','女性','nazrul'); MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Prity','孟加拉国','女性','nazrul');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Farida','孟加拉国','女性','民间'); MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Farida','孟加拉国','女性','民间');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Topu','印度','女性','现代'); MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Topu','印度','女性','现代');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Enrique','美国','男性','蓝调'); MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Enrique','美国','男性','蓝调');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Michel','英国','男性','流行'); MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Michel','英国','男性','流行');
MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (1,'Shrikanta','3.78 MB','3:45','mp4'); MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (1,'Shrikanta','3.78 MB','3:45','mp4');
MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (2,'Prity','4.12 MB','2:56','mp3'); MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (2,'Prity','4.12 MB','2:56','mp3');

View File

@@ -88,8 +88,8 @@ CREATE TABLE IF NOT EXISTS `s2_chat_memory` (
`question` varchar(655) , `question` varchar(655) ,
`agent_id` INT , `agent_id` INT ,
`db_schema` TEXT , `db_schema` TEXT ,
`side_info` TEXT ,
`s2_sql` TEXT , `s2_sql` TEXT ,
`side_info` TEXT ,
`status` char(10) , `status` char(10) ,
`llm_review` char(10) , `llm_review` char(10) ,
`llm_comment` TEXT, `llm_comment` TEXT,
@@ -271,39 +271,39 @@ COMMENT ON TABLE s2_canvas IS 'canvas table';
CREATE TABLE IF NOT EXISTS `s2_query_stat_info` ( CREATE TABLE IF NOT EXISTS `s2_query_stat_info` (
`id` INT NOT NULL AUTO_INCREMENT, `id` INT NOT NULL AUTO_INCREMENT,
`trace_id` varchar(200) DEFAULT NULL, -- query unique identifier `trace_id` varchar(200) DEFAULT NULL, -- query unique identifier
`model_id` INT DEFAULT NULL, `model_id` INT DEFAULT NULL,
`data_set_id` INT DEFAULT NULL, `data_set_id` INT DEFAULT NULL,
`user` varchar(200) DEFAULT NULL, `user` varchar(200) DEFAULT NULL,
`created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP , `created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ,
`query_type` varchar(200) DEFAULT NULL, -- the corresponding scene `query_type` varchar(200) DEFAULT NULL, -- the corresponding scene
`query_type_back` INT DEFAULT '0' , -- query type, 0-normal query, 1-pre-refresh type `query_type_back` INT DEFAULT '0' , -- query type, 0-normal query, 1-pre-refresh type
`query_sql_cmd`LONGVARCHAR , -- sql type request parameter `query_sql_cmd`LONGVARCHAR , -- sql type request parameter
`sql_cmd_md5` varchar(200) DEFAULT NULL, -- sql type request parameter md5 `sql_cmd_md5` varchar(200) DEFAULT NULL, -- sql type request parameter md5
`query_struct_cmd`LONGVARCHAR , -- struct type request parameter `query_struct_cmd`LONGVARCHAR , -- struct type request parameter
`struct_cmd_md5` varchar(200) DEFAULT NULL, -- struct type request parameter md5 value `struct_cmd_md5` varchar(200) DEFAULT NULL, -- struct type request parameter md5 value
`sql`LONGVARCHAR , `sql`LONGVARCHAR ,
`sql_md5` varchar(200) DEFAULT NULL, -- sql md5 `sql_md5` varchar(200) DEFAULT NULL, -- sql md5
`query_engine` varchar(20) DEFAULT NULL, `query_engine` varchar(20) DEFAULT NULL,
`elapsed_ms` bigINT DEFAULT NULL, `elapsed_ms` bigINT DEFAULT NULL,
`query_state` varchar(20) DEFAULT NULL, `query_state` varchar(20) DEFAULT NULL,
`native_query` INT DEFAULT NULL, -- 1-detail query, 0-aggregation query `native_query` INT DEFAULT NULL, -- 1-detail query, 0-aggregation query
`start_date` varchar(50) DEFAULT NULL, `start_date` varchar(50) DEFAULT NULL,
`end_date` varchar(50) DEFAULT NULL, `end_date` varchar(50) DEFAULT NULL,
`dimensions`LONGVARCHAR , -- dimensions involved in sql `dimensions`LONGVARCHAR , -- dimensions involved in sql
`metrics`LONGVARCHAR , -- metric involved in sql `metrics`LONGVARCHAR , -- metric involved in sql
`select_cols`LONGVARCHAR , `select_cols`LONGVARCHAR ,
`agg_cols`LONGVARCHAR , `agg_cols`LONGVARCHAR ,
`filter_cols`LONGVARCHAR , `filter_cols`LONGVARCHAR ,
`group_by_cols`LONGVARCHAR , `group_by_cols`LONGVARCHAR ,
`order_by_cols`LONGVARCHAR , `order_by_cols`LONGVARCHAR ,
`use_result_cache` TINYINT DEFAULT '-1' , -- whether to hit the result cache `use_result_cache` TINYINT DEFAULT '-1' , -- whether to hit the result cache
`use_sql_cache` TINYINT DEFAULT '-1' , -- whether to hit the sql cache `use_sql_cache` TINYINT DEFAULT '-1' , -- whether to hit the sql cache
`sql_cache_key`LONGVARCHAR , -- sql cache key `sql_cache_key`LONGVARCHAR , -- sql cache key
`result_cache_key`LONGVARCHAR , -- result cache key `result_cache_key`LONGVARCHAR , -- result cache key
`query_opt_mode` varchar(50) DEFAULT NULL , `query_opt_mode` varchar(50) DEFAULT NULL ,
PRIMARY KEY (`id`) PRIMARY KEY (`id`)
) ; ) ;
COMMENT ON TABLE s2_query_stat_info IS 'query statistics table'; COMMENT ON TABLE s2_query_stat_info IS 'query statistics table';
@@ -386,7 +386,7 @@ CREATE TABLE IF NOT EXISTS s2_agent
enable_search int null, enable_search int null,
enable_memory_review int null, enable_memory_review int null,
PRIMARY KEY (`id`) PRIMARY KEY (`id`)
); COMMENT ON TABLE s2_agent IS 'agent information table'; ); COMMENT ON TABLE s2_agent IS 'agent information table';
-------demo for semantic and chat -------demo for semantic and chat
@@ -413,7 +413,6 @@ CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info'; COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info';
CREATE TABLE IF NOT EXISTS `singer` ( CREATE TABLE IF NOT EXISTS `singer` (
`imp_date` varchar(200) NOT NULL,
`singer_name` varchar(200) NOT NULL, `singer_name` varchar(200) NOT NULL,
`act_area` varchar(200) NOT NULL, `act_area` varchar(200) NOT NULL,
`song_name` varchar(200) NOT NULL, `song_name` varchar(200) NOT NULL,
@@ -421,7 +420,7 @@ CREATE TABLE IF NOT EXISTS `singer` (
`js_play_cnt` bigINT DEFAULT NULL, `js_play_cnt` bigINT DEFAULT NULL,
`down_cnt` bigINT DEFAULT NULL, `down_cnt` bigINT DEFAULT NULL,
`favor_cnt` bigINT DEFAULT NULL, `favor_cnt` bigINT DEFAULT NULL,
PRIMARY KEY (`imp_date`, `singer_name`) PRIMARY KEY (`singer_name`)
); );
COMMENT ON TABLE singer IS 'singer_info'; COMMENT ON TABLE singer IS 'singer_info';
@@ -466,10 +465,10 @@ COMMENT ON TABLE genre IS 'genre';
CREATE TABLE IF NOT EXISTS `artist` ( CREATE TABLE IF NOT EXISTS `artist` (
`artist_name` varchar(50) NOT NULL , -- artist name `artist_name` varchar(50) NOT NULL , -- artist name
`country` varchar(20) , `citizenship` varchar(20) ,
`gender` varchar(20) , `gender` varchar(20) ,
`g_name` varchar(50), `g_name` varchar(50),
PRIMARY KEY (`artist_name`,`country`) PRIMARY KEY (`artist_name`,`citizenship`)
); );
COMMENT ON TABLE artist IS 'artist'; COMMENT ON TABLE artist IS 'artist';
@@ -670,4 +669,4 @@ CREATE TABLE IF NOT EXISTS `s2_term` (
`updated_by` varchar(100) DEFAULT NULL , `updated_by` varchar(100) DEFAULT NULL ,
PRIMARY KEY (`id`) PRIMARY KEY (`id`)
); );
COMMENT ON TABLE s2_term IS 'term info'; COMMENT ON TABLE s2_term IS 'term info';