mirror of
https://github.com/tencentmusic/supersonic.git
synced 2025-12-10 11:00:23 +00:00
(improvement)(chat) Rules, large models, and query dimension values support timelessness. (#1522)
This commit is contained in:
@@ -44,7 +44,7 @@ public class ModelDetail {
|
||||
return Lists.newArrayList();
|
||||
}
|
||||
return dimensions.stream()
|
||||
.filter(dim -> DimensionType.time.name().equalsIgnoreCase(dim.getType()))
|
||||
.filter(dim -> DimensionType.partition_time.name().equalsIgnoreCase(dim.getType()))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
|
||||
@@ -168,99 +168,129 @@ public class QueryStructReq extends SemanticQueryReq {
|
||||
return result;
|
||||
}
|
||||
|
||||
private String buildSql(QueryStructReq queryStructReq, boolean isBizName) throws JSQLParserException {
|
||||
private String buildSql(QueryStructReq queryStructReq, boolean isBizName)
|
||||
throws JSQLParserException {
|
||||
ParenthesedSelect select = new ParenthesedSelect();
|
||||
//1.Set the select items (columns)
|
||||
PlainSelect plainSelect = new PlainSelect();
|
||||
|
||||
// 1. Set the select items (columns)
|
||||
plainSelect.setSelectItems(buildSelectItems(queryStructReq));
|
||||
|
||||
// 2. Set the table name
|
||||
plainSelect.setFromItem(new Table(queryStructReq.getTableName()));
|
||||
|
||||
// 3. Set the order by clause
|
||||
plainSelect.setOrderByElements(buildOrderByElements(queryStructReq));
|
||||
|
||||
// 4. Set the group by clause
|
||||
plainSelect.setGroupByElement(buildGroupByElement(queryStructReq));
|
||||
|
||||
// 5. Set the limit clause
|
||||
plainSelect.setLimit(buildLimit(queryStructReq));
|
||||
|
||||
select.setSelect(plainSelect);
|
||||
|
||||
// 6. Set where clause
|
||||
return addWhereClauses(select.toString(), queryStructReq, isBizName);
|
||||
}
|
||||
|
||||
private List<SelectItem<?>> buildSelectItems(QueryStructReq queryStructReq) {
|
||||
List<SelectItem<?>> selectItems = new ArrayList<>();
|
||||
List<String> groups = queryStructReq.getGroups();
|
||||
|
||||
if (!CollectionUtils.isEmpty(groups)) {
|
||||
for (String group : groups) {
|
||||
selectItems.add(new SelectItem(new Column(group)));
|
||||
}
|
||||
}
|
||||
|
||||
List<Aggregator> aggregators = queryStructReq.getAggregators();
|
||||
if (!CollectionUtils.isEmpty(aggregators)) {
|
||||
for (Aggregator aggregator : aggregators) {
|
||||
String columnName = aggregator.getColumn();
|
||||
if (queryStructReq.getQueryType().isNativeAggQuery()) {
|
||||
selectItems.add(new SelectItem(new Column(columnName)));
|
||||
} else {
|
||||
Function sumFunction = new Function();
|
||||
AggOperatorEnum func = aggregator.getFunc();
|
||||
if (AggOperatorEnum.UNKNOWN.equals(func)) {
|
||||
func = AggOperatorEnum.SUM;
|
||||
}
|
||||
sumFunction.setName(func.getOperator());
|
||||
if (AggOperatorEnum.COUNT_DISTINCT.equals(func)) {
|
||||
sumFunction.setName("count");
|
||||
sumFunction.setDistinct(true);
|
||||
}
|
||||
sumFunction.setParameters(new ExpressionList(new Column(columnName)));
|
||||
SelectItem selectExpressionItem = new SelectItem(sumFunction);
|
||||
String alias = StringUtils.isNotBlank(aggregator.getAlias()) ? aggregator.getAlias() : columnName;
|
||||
selectExpressionItem.setAlias(new Alias(alias));
|
||||
selectItems.add(selectExpressionItem);
|
||||
}
|
||||
selectItems.add(buildAggregatorSelectItem(aggregator, queryStructReq));
|
||||
}
|
||||
}
|
||||
plainSelect.setSelectItems(selectItems);
|
||||
//2.Set the table name
|
||||
Table table = new Table(queryStructReq.getTableName());
|
||||
plainSelect.setFromItem(table);
|
||||
|
||||
//3.Set the order by clause
|
||||
return selectItems;
|
||||
}
|
||||
|
||||
private SelectItem buildAggregatorSelectItem(Aggregator aggregator, QueryStructReq queryStructReq) {
|
||||
String columnName = aggregator.getColumn();
|
||||
if (queryStructReq.getQueryType().isNativeAggQuery()) {
|
||||
return new SelectItem(new Column(columnName));
|
||||
} else {
|
||||
Function function = new Function();
|
||||
AggOperatorEnum func = aggregator.getFunc();
|
||||
if (AggOperatorEnum.UNKNOWN.equals(func)) {
|
||||
func = AggOperatorEnum.SUM;
|
||||
}
|
||||
function.setName(func.getOperator());
|
||||
if (AggOperatorEnum.COUNT_DISTINCT.equals(func)) {
|
||||
function.setName("count");
|
||||
function.setDistinct(true);
|
||||
}
|
||||
function.setParameters(new ExpressionList(new Column(columnName)));
|
||||
SelectItem selectExpressionItem = new SelectItem(function);
|
||||
String alias = StringUtils.isNotBlank(aggregator.getAlias()) ? aggregator.getAlias() : columnName;
|
||||
selectExpressionItem.setAlias(new Alias(alias));
|
||||
return selectExpressionItem;
|
||||
}
|
||||
}
|
||||
|
||||
private List<OrderByElement> buildOrderByElements(QueryStructReq queryStructReq) {
|
||||
List<Order> orders = queryStructReq.getOrders();
|
||||
List<OrderByElement> orderByElements = new ArrayList<>();
|
||||
|
||||
if (!CollectionUtils.isEmpty(orders)) {
|
||||
List<OrderByElement> orderByElements = new ArrayList<>();
|
||||
for (Order order : orders) {
|
||||
if (StringUtils.isBlank(order.getColumn())) {
|
||||
continue;
|
||||
}
|
||||
OrderByElement orderByElement = new OrderByElement();
|
||||
orderByElement.setExpression(new Column(order.getColumn()));
|
||||
orderByElement.setAsc(false);
|
||||
if (Constants.ASC_UPPER.equalsIgnoreCase(order.getDirection())) {
|
||||
orderByElement.setAsc(true);
|
||||
}
|
||||
orderByElement.setAsc(Constants.ASC_UPPER.equalsIgnoreCase(order.getDirection()));
|
||||
orderByElements.add(orderByElement);
|
||||
}
|
||||
plainSelect.setOrderByElements(orderByElements);
|
||||
}
|
||||
|
||||
//4.Set the group by clause
|
||||
return orderByElements;
|
||||
}
|
||||
|
||||
private GroupByElement buildGroupByElement(QueryStructReq queryStructReq) {
|
||||
List<String> groups = queryStructReq.getGroups();
|
||||
if (!CollectionUtils.isEmpty(groups) && !queryStructReq.getQueryType().isNativeAggQuery()) {
|
||||
GroupByElement groupByElement = new GroupByElement();
|
||||
for (String group : groups) {
|
||||
groupByElement.addGroupByExpression(new Column(group));
|
||||
}
|
||||
plainSelect.setGroupByElement(groupByElement);
|
||||
return groupByElement;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
//5.Set the limit clause
|
||||
if (Objects.nonNull(queryStructReq.getLimit())) {
|
||||
Limit limit = new Limit();
|
||||
limit.setRowCount(new LongValue(queryStructReq.getLimit()));
|
||||
plainSelect.setLimit(limit);
|
||||
private Limit buildLimit(QueryStructReq queryStructReq) {
|
||||
if (Objects.isNull(queryStructReq.getLimit())) {
|
||||
return null;
|
||||
}
|
||||
//select.setSelectBody(plainSelect);
|
||||
select.setSelect(plainSelect);
|
||||
Limit limit = new Limit();
|
||||
limit.setRowCount(new LongValue(queryStructReq.getLimit()));
|
||||
return limit;
|
||||
}
|
||||
|
||||
|
||||
//6.Set where
|
||||
List<Filter> dimensionFilters = queryStructReq.getDimensionFilters();
|
||||
private String addWhereClauses(String sql, QueryStructReq queryStructReq, boolean isBizName)
|
||||
throws JSQLParserException {
|
||||
SqlFilterUtils sqlFilterUtils = ContextUtils.getBean(SqlFilterUtils.class);
|
||||
String whereClause = sqlFilterUtils.getWhereClause(dimensionFilters, isBizName);
|
||||
String whereClause = sqlFilterUtils.getWhereClause(queryStructReq.getDimensionFilters(), isBizName);
|
||||
|
||||
String sql = select.toString();
|
||||
if (StringUtils.isNotBlank(whereClause)) {
|
||||
Expression expression = CCJSqlParserUtil.parseCondExpression(whereClause);
|
||||
sql = SqlAddHelper.addWhere(sql, expression);
|
||||
}
|
||||
|
||||
//7.Set DateInfo
|
||||
DateModeUtils dateModeUtils = ContextUtils.getBean(DateModeUtils.class);
|
||||
|
||||
String dateWhereStr = dateModeUtils.getDateWhereStr(queryStructReq.getDateInfo());
|
||||
|
||||
if (StringUtils.isNotBlank(dateWhereStr)) {
|
||||
Expression expression = CCJSqlParserUtil.parseCondExpression(dateWhereStr);
|
||||
sql = SqlAddHelper.addWhere(sql, expression);
|
||||
|
||||
@@ -7,6 +7,7 @@ import com.tencent.supersonic.common.pojo.ChatModelConfig;
|
||||
import com.tencent.supersonic.common.pojo.Text2SQLExemplar;
|
||||
import com.tencent.supersonic.common.pojo.enums.Text2SQLType;
|
||||
import com.tencent.supersonic.common.util.ContextUtils;
|
||||
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
|
||||
import com.tencent.supersonic.headless.api.pojo.QueryDataType;
|
||||
import com.tencent.supersonic.headless.api.pojo.SchemaMapInfo;
|
||||
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
|
||||
@@ -66,4 +67,10 @@ public class ChatQueryContext {
|
||||
.collect(Collectors.toList());
|
||||
return candidateQueries;
|
||||
}
|
||||
|
||||
public boolean containsPartitionDimensions(Long dataSetId) {
|
||||
SemanticSchema semanticSchema = this.getSemanticSchema();
|
||||
DataSetSchema dataSetSchema = semanticSchema.getDataSetSchemaMap().get(dataSetId);
|
||||
return dataSetSchema.containsPartitionDimensions();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,10 @@ import com.tencent.supersonic.headless.api.pojo.SchemaElement;
|
||||
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
|
||||
import com.tencent.supersonic.headless.api.pojo.SemanticSchema;
|
||||
import com.tencent.supersonic.headless.chat.ChatQueryContext;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
@@ -15,10 +19,6 @@ import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
/**
|
||||
* basic semantic correction functionality, offering common methods and an
|
||||
@@ -61,14 +61,16 @@ public abstract class BaseSemanticCorrector implements SemanticCorrector {
|
||||
return elements.stream();
|
||||
})
|
||||
.collect(Collectors.toMap(a -> a, a -> a, (k1, k2) -> k1));
|
||||
result.put(TimeDimensionEnum.DAY.getChName(), TimeDimensionEnum.DAY.getChName());
|
||||
result.put(TimeDimensionEnum.MONTH.getChName(), TimeDimensionEnum.MONTH.getChName());
|
||||
result.put(TimeDimensionEnum.WEEK.getChName(), TimeDimensionEnum.WEEK.getChName());
|
||||
|
||||
result.put(TimeDimensionEnum.DAY.getName(), TimeDimensionEnum.DAY.getChName());
|
||||
result.put(TimeDimensionEnum.MONTH.getName(), TimeDimensionEnum.MONTH.getChName());
|
||||
result.put(TimeDimensionEnum.WEEK.getName(), TimeDimensionEnum.WEEK.getChName());
|
||||
if (chatQueryContext.containsPartitionDimensions(dataSetId)) {
|
||||
result.put(TimeDimensionEnum.DAY.getChName(), TimeDimensionEnum.DAY.getChName());
|
||||
result.put(TimeDimensionEnum.MONTH.getChName(), TimeDimensionEnum.MONTH.getChName());
|
||||
result.put(TimeDimensionEnum.WEEK.getChName(), TimeDimensionEnum.WEEK.getChName());
|
||||
|
||||
result.put(TimeDimensionEnum.DAY.getName(), TimeDimensionEnum.DAY.getChName());
|
||||
result.put(TimeDimensionEnum.MONTH.getName(), TimeDimensionEnum.MONTH.getChName());
|
||||
result.put(TimeDimensionEnum.WEEK.getName(), TimeDimensionEnum.WEEK.getChName());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
@@ -61,11 +61,6 @@ public class ParserConfig extends ParameterConfig {
|
||||
"解析结果展示个数", "前端展示的解析个数",
|
||||
"number", "Parser相关配置");
|
||||
|
||||
public static final Parameter PARSER_S2SQL_ENABLE =
|
||||
new Parameter("s2.parser.s2sql.switch", "true",
|
||||
"", "",
|
||||
"bool", "Parser相关配置");
|
||||
|
||||
@Override
|
||||
public List<Parameter> getSysParameters() {
|
||||
return Lists.newArrayList(
|
||||
|
||||
@@ -86,8 +86,9 @@ public class LLMRequestService {
|
||||
&& Objects.nonNull(semanticSchema.getDataSetSchemaMap().get(dataSetId))) {
|
||||
TimeDefaultConfig timeDefaultConfig = semanticSchema.getDataSetSchemaMap()
|
||||
.get(dataSetId).getTagTypeTimeDefaultConfig();
|
||||
if (!Objects.equals(timeDefaultConfig.getUnit(), -1)) {
|
||||
// 数据集查询设置 时间不为-1时才添加 '数据日期' 字段
|
||||
if (!Objects.equals(timeDefaultConfig.getUnit(), -1)
|
||||
&& queryCtx.containsPartitionDimensions(dataSetId)) {
|
||||
// 数据集配置了数据日期字段,并查询设置 时间不为-1时才添加 '数据日期' 字段
|
||||
fieldNameList.add(TimeDimensionEnum.DAY.getChName());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.chat.parser.rule;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.Constants;
|
||||
import com.tencent.supersonic.common.pojo.DateConf;
|
||||
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
|
||||
import com.tencent.supersonic.headless.chat.ChatQueryContext;
|
||||
import com.tencent.supersonic.headless.chat.parser.SemanticParser;
|
||||
import com.tencent.supersonic.headless.chat.query.QueryManager;
|
||||
@@ -10,7 +11,6 @@ import com.tencent.supersonic.headless.chat.query.rule.RuleSemanticQuery;
|
||||
import com.xkzhangsan.time.nlp.TimeNLP;
|
||||
import com.xkzhangsan.time.nlp.TimeNLPUtil;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.text.DateFormat;
|
||||
import java.text.ParseException;
|
||||
@@ -22,8 +22,6 @@ import java.util.Stack;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
;
|
||||
|
||||
/**
|
||||
* TimeRangeParser extracts time range specified in the user query
|
||||
* based on keyword matching.
|
||||
@@ -52,123 +50,127 @@ public class TimeRangeParser implements SemanticParser {
|
||||
}
|
||||
|
||||
if (dateConf != null) {
|
||||
if (queryContext.getCandidateQueries().size() > 0) {
|
||||
for (SemanticQuery query : queryContext.getCandidateQueries()) {
|
||||
query.getParseInfo().setDateInfo(dateConf);
|
||||
query.getParseInfo().setScore(query.getParseInfo().getScore()
|
||||
+ dateConf.getDetectWord().length());
|
||||
}
|
||||
} else if (QueryManager.containsRuleQuery(queryContext.getContextParseInfo().getQueryMode())) {
|
||||
RuleSemanticQuery semanticQuery = QueryManager.createRuleQuery(
|
||||
queryContext.getContextParseInfo().getQueryMode());
|
||||
// inherit parse info from context
|
||||
queryContext.getContextParseInfo().setDateInfo(dateConf);
|
||||
queryContext.getContextParseInfo().setScore(queryContext.getContextParseInfo().getScore()
|
||||
+ dateConf.getDetectWord().length());
|
||||
semanticQuery.setParseInfo(queryContext.getContextParseInfo());
|
||||
updateQueryContext(queryContext, dateConf);
|
||||
}
|
||||
}
|
||||
|
||||
private void updateQueryContext(ChatQueryContext queryContext, DateConf dateConf) {
|
||||
if (!queryContext.getCandidateQueries().isEmpty()) {
|
||||
for (SemanticQuery query : queryContext.getCandidateQueries()) {
|
||||
query.getParseInfo().setDateInfo(dateConf);
|
||||
query.getParseInfo().setScore(query.getParseInfo().getScore() + dateConf.getDetectWord().length());
|
||||
}
|
||||
} else {
|
||||
SemanticParseInfo contextParseInfo = queryContext.getContextParseInfo();
|
||||
if (QueryManager.containsRuleQuery(contextParseInfo.getQueryMode())) {
|
||||
RuleSemanticQuery semanticQuery = QueryManager.createRuleQuery(contextParseInfo.getQueryMode());
|
||||
contextParseInfo.setDateInfo(dateConf);
|
||||
contextParseInfo.setScore(contextParseInfo.getScore() + dateConf.getDetectWord().length());
|
||||
semanticQuery.setParseInfo(contextParseInfo);
|
||||
queryContext.getCandidateQueries().add(semanticQuery);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private DateConf parseDateCN(String queryText) {
|
||||
Date startDate = null;
|
||||
Date endDate;
|
||||
String detectWord = null;
|
||||
|
||||
List<TimeNLP> times = TimeNLPUtil.parse(queryText);
|
||||
if (times.size() > 0) {
|
||||
startDate = times.get(0).getTime();
|
||||
detectWord = times.get(0).getTimeExpression();
|
||||
} else {
|
||||
if (times.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Date startDate = times.get(0).getTime();
|
||||
String detectWord = times.get(0).getTimeExpression();
|
||||
Date endDate = times.size() > 1 ? times.get(1).getTime() : startDate;
|
||||
|
||||
if (times.size() > 1) {
|
||||
endDate = times.get(1).getTime();
|
||||
detectWord += "~" + times.get(0).getTimeExpression();
|
||||
} else {
|
||||
endDate = startDate;
|
||||
detectWord += "~" + times.get(1).getTimeExpression();
|
||||
}
|
||||
|
||||
return getDateConf(startDate, endDate, detectWord);
|
||||
}
|
||||
|
||||
private DateConf parseDateNumber(String queryText) {
|
||||
String startDate;
|
||||
String endDate = null;
|
||||
String detectWord = null;
|
||||
|
||||
Matcher dateMatcher = DATE_PATTERN_NUMBER.matcher(queryText);
|
||||
if (dateMatcher.find()) {
|
||||
startDate = dateMatcher.group();
|
||||
detectWord = startDate;
|
||||
} else {
|
||||
if (!dateMatcher.find()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (dateMatcher.find()) {
|
||||
endDate = dateMatcher.group();
|
||||
detectWord += "~" + endDate;
|
||||
String startDateStr = dateMatcher.group();
|
||||
String detectWord = startDateStr;
|
||||
String endDateStr = dateMatcher.find() ? dateMatcher.group() : startDateStr;
|
||||
|
||||
if (!startDateStr.equals(endDateStr)) {
|
||||
detectWord += "~" + endDateStr;
|
||||
}
|
||||
|
||||
endDate = endDate != null ? endDate : startDate;
|
||||
|
||||
try {
|
||||
return getDateConf(DATE_FORMAT_NUMBER.parse(startDate), DATE_FORMAT_NUMBER.parse(endDate), detectWord);
|
||||
Date startDate = DATE_FORMAT_NUMBER.parse(startDateStr);
|
||||
Date endDate = DATE_FORMAT_NUMBER.parse(endDateStr);
|
||||
return getDateConf(startDate, endDate, detectWord);
|
||||
} catch (ParseException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private DateConf parseRecent(String queryText) {
|
||||
Matcher m = RECENT_PATTERN_CN.matcher(queryText);
|
||||
if (m.matches()) {
|
||||
int num = 0;
|
||||
String enNum = m.group("enNum");
|
||||
String zhNum = m.group("zhNum");
|
||||
if (enNum != null) {
|
||||
num = Integer.parseInt(enNum);
|
||||
} else if (zhNum != null) {
|
||||
num = zhNumParse(zhNum);
|
||||
}
|
||||
if (num > 0) {
|
||||
DateConf info = new DateConf();
|
||||
String zhPeriod = m.group("zhPeriod");
|
||||
int days;
|
||||
switch (zhPeriod) {
|
||||
case "周":
|
||||
days = 7;
|
||||
info.setPeriod(Constants.WEEK);
|
||||
break;
|
||||
case "月":
|
||||
days = 30;
|
||||
info.setPeriod(Constants.MONTH);
|
||||
break;
|
||||
case "年":
|
||||
days = 365;
|
||||
info.setPeriod(Constants.YEAR);
|
||||
break;
|
||||
default:
|
||||
days = 1;
|
||||
info.setPeriod(Constants.DAY);
|
||||
}
|
||||
days = days * num;
|
||||
info.setDateMode(DateConf.DateMode.RECENT);
|
||||
String detectWord = "近" + num + zhPeriod;
|
||||
if (StringUtils.isNotEmpty(m.group("periodStr"))) {
|
||||
detectWord = m.group("periodStr");
|
||||
}
|
||||
info.setDetectWord(detectWord);
|
||||
info.setStartDate(LocalDate.now().minusDays(days).toString());
|
||||
info.setEndDate(LocalDate.now().minusDays(1).toString());
|
||||
info.setUnit(num);
|
||||
|
||||
return info;
|
||||
}
|
||||
Matcher matcher = RECENT_PATTERN_CN.matcher(queryText);
|
||||
if (!matcher.matches()) {
|
||||
return null;
|
||||
}
|
||||
int num = parseNumber(matcher);
|
||||
if (num <= 0) {
|
||||
return null;
|
||||
}
|
||||
String zhPeriod = matcher.group("zhPeriod");
|
||||
int days = getDaysByPeriod(zhPeriod) * num;
|
||||
String detectWord = matcher.group("periodStr");
|
||||
|
||||
return null;
|
||||
DateConf info = new DateConf();
|
||||
info.setPeriod(getPeriodConstant(zhPeriod));
|
||||
info.setDateMode(DateConf.DateMode.RECENT);
|
||||
info.setDetectWord(detectWord);
|
||||
info.setStartDate(LocalDate.now().minusDays(days).toString());
|
||||
info.setEndDate(LocalDate.now().minusDays(1).toString());
|
||||
info.setUnit(num);
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
private int parseNumber(Matcher matcher) {
|
||||
String enNum = matcher.group("enNum");
|
||||
String zhNum = matcher.group("zhNum");
|
||||
if (enNum != null) {
|
||||
return Integer.parseInt(enNum);
|
||||
} else if (zhNum != null) {
|
||||
return zhNumParse(zhNum);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
private int getDaysByPeriod(String zhPeriod) {
|
||||
switch (zhPeriod) {
|
||||
case "周":
|
||||
return 7;
|
||||
case "月":
|
||||
return 30;
|
||||
case "年":
|
||||
return 365;
|
||||
default:
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
private String getPeriodConstant(String zhPeriod) {
|
||||
switch (zhPeriod) {
|
||||
case "周":
|
||||
return Constants.WEEK;
|
||||
case "月":
|
||||
return Constants.MONTH;
|
||||
case "年":
|
||||
return Constants.YEAR;
|
||||
default:
|
||||
return Constants.DAY;
|
||||
}
|
||||
}
|
||||
|
||||
private int zhNumParse(String zhNumStr) {
|
||||
@@ -176,10 +178,9 @@ public class TimeRangeParser implements SemanticParser {
|
||||
String numStr = "一二三四五六七八九";
|
||||
String unitStr = "十百千万亿";
|
||||
|
||||
String[] ssArr = zhNumStr.split("");
|
||||
for (String e : ssArr) {
|
||||
int numIndex = numStr.indexOf(e);
|
||||
int unitIndex = unitStr.indexOf(e);
|
||||
for (char c : zhNumStr.toCharArray()) {
|
||||
int numIndex = numStr.indexOf(c);
|
||||
int unitIndex = unitStr.indexOf(c);
|
||||
if (numIndex != -1) {
|
||||
stack.push(numIndex + 1);
|
||||
} else if (unitIndex != -1) {
|
||||
@@ -192,7 +193,7 @@ public class TimeRangeParser implements SemanticParser {
|
||||
}
|
||||
}
|
||||
|
||||
return stack.stream().mapToInt(s -> s).sum();
|
||||
return stack.stream().mapToInt(Integer::intValue).sum();
|
||||
}
|
||||
|
||||
private DateConf getDateConf(Date startDate, Date endDate, String detectWord) {
|
||||
@@ -207,5 +208,4 @@ public class TimeRangeParser implements SemanticParser {
|
||||
info.setDetectWord(detectWord);
|
||||
return info;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,12 +5,11 @@ import com.tencent.supersonic.common.pojo.Aggregator;
|
||||
import com.tencent.supersonic.common.pojo.Filter;
|
||||
import com.tencent.supersonic.common.pojo.Order;
|
||||
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
|
||||
import com.tencent.supersonic.common.util.ContextUtils;
|
||||
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
|
||||
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
|
||||
import com.tencent.supersonic.headless.chat.parser.ParserConfig;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
|
||||
import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
|
||||
import lombok.ToString;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@@ -21,8 +20,6 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static com.tencent.supersonic.headless.chat.parser.ParserConfig.PARSER_S2SQL_ENABLE;
|
||||
|
||||
@Slf4j
|
||||
@ToString
|
||||
public abstract class BaseSemanticQuery implements SemanticQuery, Serializable {
|
||||
@@ -43,6 +40,19 @@ public abstract class BaseSemanticQuery implements SemanticQuery, Serializable {
|
||||
return QueryReqBuilder.buildStructReq(parseInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SemanticQueryReq buildSemanticQueryReq() {
|
||||
return QueryReqBuilder.buildS2SQLReq(parseInfo.getSqlInfo(), parseInfo.getDataSetId());
|
||||
}
|
||||
|
||||
protected void initS2SqlByStruct(DataSetSchema dataSetSchema) {
|
||||
QueryStructReq queryStructReq = convertQueryStruct();
|
||||
convertBizNameToName(dataSetSchema, queryStructReq);
|
||||
QuerySqlReq querySQLReq = queryStructReq.convert();
|
||||
parseInfo.getSqlInfo().setParsedS2SQL(querySQLReq.getSql());
|
||||
parseInfo.getSqlInfo().setCorrectedS2SQL(querySQLReq.getSql());
|
||||
}
|
||||
|
||||
protected void convertBizNameToName(DataSetSchema dataSetSchema, QueryStructReq queryStructReq) {
|
||||
Map<String, String> bizNameToName = dataSetSchema.getBizNameToName();
|
||||
bizNameToName.putAll(TimeDimensionEnum.getNameToNameMap());
|
||||
@@ -74,17 +84,4 @@ public abstract class BaseSemanticQuery implements SemanticQuery, Serializable {
|
||||
}
|
||||
}
|
||||
|
||||
protected void initS2SqlByStruct(DataSetSchema dataSetSchema) {
|
||||
ParserConfig parserConfig = ContextUtils.getBean(ParserConfig.class);
|
||||
boolean s2sqlEnable = Boolean.valueOf(parserConfig.getParameterValue(PARSER_S2SQL_ENABLE));
|
||||
if (!s2sqlEnable) {
|
||||
return;
|
||||
}
|
||||
QueryStructReq queryStructReq = convertQueryStruct();
|
||||
convertBizNameToName(dataSetSchema, queryStructReq);
|
||||
QuerySqlReq querySQLReq = queryStructReq.convert();
|
||||
parseInfo.getSqlInfo().setParsedS2SQL(querySQLReq.getSql());
|
||||
parseInfo.getSqlInfo().setCorrectedS2SQL(querySQLReq.getSql());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -3,10 +3,8 @@ package com.tencent.supersonic.headless.chat.query.llm.s2sql;
|
||||
import com.tencent.supersonic.auth.api.authentication.pojo.User;
|
||||
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
|
||||
import com.tencent.supersonic.headless.api.pojo.SqlInfo;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
|
||||
import com.tencent.supersonic.headless.chat.query.QueryManager;
|
||||
import com.tencent.supersonic.headless.chat.query.llm.LLMSemanticQuery;
|
||||
import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
@@ -25,11 +23,6 @@ public class LLMSqlQuery extends LLMSemanticQuery {
|
||||
return QUERY_MODE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SemanticQueryReq buildSemanticQueryReq() {
|
||||
return QueryReqBuilder.buildS2SQLReq(parseInfo.getSqlInfo(), parseInfo.getDataSetId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void initS2Sql(DataSetSchema dataSetSchema, User user) {
|
||||
SqlInfo sqlInfo = parseInfo.getSqlInfo();
|
||||
|
||||
@@ -27,6 +27,7 @@ import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@@ -41,7 +42,7 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
|
||||
}
|
||||
|
||||
public List<SchemaElementMatch> match(List<SchemaElementMatch> candidateElementMatches,
|
||||
ChatQueryContext queryCtx) {
|
||||
ChatQueryContext queryCtx) {
|
||||
return queryMatcher.match(candidateElementMatches);
|
||||
}
|
||||
|
||||
@@ -56,20 +57,30 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
|
||||
|
||||
fillSchemaElement(parseInfo, semanticSchema);
|
||||
fillScore(parseInfo);
|
||||
fillDateConf(parseInfo, chatQueryContext.getContextParseInfo());
|
||||
fillDateConfByInherited(parseInfo, chatQueryContext);
|
||||
}
|
||||
|
||||
private void fillDateConf(SemanticParseInfo queryParseInfo, SemanticParseInfo chatParseInfo) {
|
||||
if (queryParseInfo.getDateInfo() != null || chatParseInfo.getDateInfo() == null) {
|
||||
public boolean needFillDateConf(ChatQueryContext chatQueryContext) {
|
||||
Long dataSetId = parseInfo.getDataSetId();
|
||||
if (Objects.isNull(dataSetId) || dataSetId <= 0L) {
|
||||
return false;
|
||||
}
|
||||
return chatQueryContext.containsPartitionDimensions(dataSetId);
|
||||
}
|
||||
|
||||
private void fillDateConfByInherited(SemanticParseInfo queryParseInfo, ChatQueryContext chatQueryContext) {
|
||||
SemanticParseInfo contextParseInfo = chatQueryContext.getContextParseInfo();
|
||||
if (queryParseInfo.getDateInfo() != null || contextParseInfo.getDateInfo() == null
|
||||
|| needFillDateConf(chatQueryContext)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ((QueryManager.isTagQuery(queryParseInfo.getQueryMode())
|
||||
&& QueryManager.isTagQuery(chatParseInfo.getQueryMode()))
|
||||
&& QueryManager.isTagQuery(contextParseInfo.getQueryMode()))
|
||||
|| (QueryManager.isMetricQuery(queryParseInfo.getQueryMode())
|
||||
&& QueryManager.isMetricQuery(chatParseInfo.getQueryMode()))) {
|
||||
&& QueryManager.isMetricQuery(contextParseInfo.getQueryMode()))) {
|
||||
// inherit date info from context
|
||||
queryParseInfo.setDateInfo(chatParseInfo.getDateInfo());
|
||||
queryParseInfo.setDateInfo(contextParseInfo.getDateInfo());
|
||||
queryParseInfo.getDateInfo().setInherited(true);
|
||||
}
|
||||
}
|
||||
@@ -142,13 +153,15 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
|
||||
}
|
||||
|
||||
private void addToFilters(Map<Long, List<SchemaElementMatch>> id2Values, SemanticParseInfo parseInfo,
|
||||
SemanticSchema semanticSchema, SchemaElementType entity) {
|
||||
SemanticSchema semanticSchema, SchemaElementType entity) {
|
||||
if (id2Values == null || id2Values.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
for (Entry<Long, List<SchemaElementMatch>> entry : id2Values.entrySet()) {
|
||||
SchemaElement dimension = semanticSchema.getElement(entity, entry.getKey());
|
||||
|
||||
if (dimension.containsPartitionTime()) {
|
||||
continue;
|
||||
}
|
||||
if (entry.getValue().size() == 1) {
|
||||
SchemaElementMatch schemaMatch = entry.getValue().get(0);
|
||||
QueryFilter dimensionFilter = new QueryFilter();
|
||||
@@ -173,34 +186,6 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
|
||||
}
|
||||
}
|
||||
|
||||
private void addToValues(SemanticSchema semanticSchema, SchemaElementType entity,
|
||||
Map<Long, List<SchemaElementMatch>> id2Values, SchemaElementMatch schemaMatch) {
|
||||
SchemaElement element = schemaMatch.getElement();
|
||||
SchemaElement entityElement = semanticSchema.getElement(entity, element.getId());
|
||||
if (entityElement != null) {
|
||||
if (id2Values.containsKey(element.getId())) {
|
||||
id2Values.get(element.getId()).add(schemaMatch);
|
||||
} else {
|
||||
id2Values.put(element.getId(), new ArrayList<>(Arrays.asList(schemaMatch)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public SemanticQueryReq buildSemanticQueryReq() {
|
||||
String queryMode = parseInfo.getQueryMode();
|
||||
|
||||
if (parseInfo.getDataSetId() == null || StringUtils.isEmpty(queryMode)
|
||||
|| !QueryManager.containsRuleQuery(queryMode)) {
|
||||
// reach here some error may happen
|
||||
log.error("not find QueryMode");
|
||||
throw new RuntimeException("not find QueryMode");
|
||||
}
|
||||
|
||||
QueryStructReq queryStructReq = convertQueryStruct();
|
||||
return queryStructReq.convert(true);
|
||||
}
|
||||
|
||||
protected boolean isMultiStructQuery() {
|
||||
return false;
|
||||
}
|
||||
@@ -224,7 +209,7 @@ public abstract class RuleSemanticQuery extends BaseSemanticQuery {
|
||||
}
|
||||
|
||||
public static List<RuleSemanticQuery> resolve(Long dataSetId, List<SchemaElementMatch> candidateElementMatches,
|
||||
ChatQueryContext chatQueryContext) {
|
||||
ChatQueryContext chatQueryContext) {
|
||||
List<RuleSemanticQuery> matchedQueries = new ArrayList<>();
|
||||
for (RuleSemanticQuery semanticQuery : QueryManager.getRuleQueries()) {
|
||||
List<SchemaElementMatch> matches = semanticQuery.match(candidateElementMatches, chatQueryContext);
|
||||
|
||||
@@ -25,34 +25,35 @@ public abstract class DetailListQuery extends DetailSemanticQuery {
|
||||
|
||||
private void addEntityDetailAndOrderByMetric(ChatQueryContext chatQueryContext, SemanticParseInfo parseInfo) {
|
||||
Long dataSetId = parseInfo.getDataSetId();
|
||||
if (Objects.nonNull(dataSetId) && dataSetId > 0L) {
|
||||
DataSetSchema dataSetSchema = chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(dataSetId);
|
||||
if (dataSetSchema != null && Objects.nonNull(dataSetSchema.getEntity())) {
|
||||
Set<SchemaElement> dimensions = new LinkedHashSet<>();
|
||||
Set<SchemaElement> metrics = new LinkedHashSet<>();
|
||||
Set<Order> orders = new LinkedHashSet<>();
|
||||
TagTypeDefaultConfig tagTypeDefaultConfig = dataSetSchema.getTagTypeDefaultConfig();
|
||||
if (tagTypeDefaultConfig != null && tagTypeDefaultConfig.getDefaultDisplayInfo() != null) {
|
||||
if (CollectionUtils.isNotEmpty(tagTypeDefaultConfig.getDefaultDisplayInfo().getMetricIds())) {
|
||||
metrics = tagTypeDefaultConfig.getDefaultDisplayInfo().getMetricIds()
|
||||
.stream().map(id -> {
|
||||
SchemaElement metric = dataSetSchema.getElement(SchemaElementType.METRIC, id);
|
||||
if (metric != null) {
|
||||
orders.add(new Order(metric.getBizName(), Constants.DESC_UPPER));
|
||||
}
|
||||
return metric;
|
||||
}).filter(Objects::nonNull).collect(Collectors.toSet());
|
||||
}
|
||||
if (CollectionUtils.isNotEmpty(tagTypeDefaultConfig.getDefaultDisplayInfo().getDimensionIds())) {
|
||||
dimensions = tagTypeDefaultConfig.getDefaultDisplayInfo().getDimensionIds().stream()
|
||||
.map(id -> dataSetSchema.getElement(SchemaElementType.DIMENSION, id))
|
||||
.filter(Objects::nonNull).collect(Collectors.toSet());
|
||||
}
|
||||
if (Objects.isNull(dataSetId) || dataSetId <= 0L) {
|
||||
return;
|
||||
}
|
||||
DataSetSchema dataSetSchema = chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(dataSetId);
|
||||
if (dataSetSchema != null && Objects.nonNull(dataSetSchema.getEntity())) {
|
||||
Set<SchemaElement> dimensions = new LinkedHashSet<>();
|
||||
Set<SchemaElement> metrics = new LinkedHashSet<>();
|
||||
Set<Order> orders = new LinkedHashSet<>();
|
||||
TagTypeDefaultConfig tagTypeDefaultConfig = dataSetSchema.getTagTypeDefaultConfig();
|
||||
if (tagTypeDefaultConfig != null && tagTypeDefaultConfig.getDefaultDisplayInfo() != null) {
|
||||
if (CollectionUtils.isNotEmpty(tagTypeDefaultConfig.getDefaultDisplayInfo().getMetricIds())) {
|
||||
metrics = tagTypeDefaultConfig.getDefaultDisplayInfo().getMetricIds()
|
||||
.stream().map(id -> {
|
||||
SchemaElement metric = dataSetSchema.getElement(SchemaElementType.METRIC, id);
|
||||
if (metric != null) {
|
||||
orders.add(new Order(metric.getBizName(), Constants.DESC_UPPER));
|
||||
}
|
||||
return metric;
|
||||
}).filter(Objects::nonNull).collect(Collectors.toSet());
|
||||
}
|
||||
if (CollectionUtils.isNotEmpty(tagTypeDefaultConfig.getDefaultDisplayInfo().getDimensionIds())) {
|
||||
dimensions = tagTypeDefaultConfig.getDefaultDisplayInfo().getDimensionIds().stream()
|
||||
.map(id -> dataSetSchema.getElement(SchemaElementType.DIMENSION, id))
|
||||
.filter(Objects::nonNull).collect(Collectors.toSet());
|
||||
}
|
||||
parseInfo.setDimensions(dimensions);
|
||||
parseInfo.setMetrics(metrics);
|
||||
parseInfo.setOrders(orders);
|
||||
}
|
||||
parseInfo.setDimensions(dimensions);
|
||||
parseInfo.setMetrics(metrics);
|
||||
parseInfo.setOrders(orders);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
@Slf4j
|
||||
@@ -39,28 +40,31 @@ public abstract class DetailSemanticQuery extends RuleSemanticQuery {
|
||||
|
||||
parseInfo.setQueryType(QueryType.DETAIL);
|
||||
parseInfo.setLimit(DETAIL_MAX_RESULTS);
|
||||
if (parseInfo.getDateInfo() == null) {
|
||||
DataSetSchema dataSetSchema =
|
||||
chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(parseInfo.getDataSetId());
|
||||
TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
|
||||
if (!needFillDateConf(chatQueryContext)) {
|
||||
return;
|
||||
}
|
||||
Map<Long, DataSetSchema> dataSetSchemaMap = chatQueryContext.getSemanticSchema().getDataSetSchemaMap();
|
||||
DataSetSchema dataSetSchema = dataSetSchemaMap.get(parseInfo.getDataSetId());
|
||||
TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
|
||||
|
||||
if (Objects.nonNull(timeDefaultConfig)
|
||||
&& Objects.nonNull(timeDefaultConfig.getUnit())
|
||||
&& timeDefaultConfig.getUnit() != -1) {
|
||||
DateConf dateInfo = new DateConf();
|
||||
if (Objects.nonNull(timeDefaultConfig) && Objects.nonNull(timeDefaultConfig.getUnit())
|
||||
&& timeDefaultConfig.getUnit() != -1) {
|
||||
int unit = timeDefaultConfig.getUnit();
|
||||
String startDate = LocalDate.now().plusDays(-unit).toString();
|
||||
String endDate = startDate;
|
||||
if (TimeMode.LAST.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
|
||||
} else if (TimeMode.RECENT.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.RECENT);
|
||||
endDate = LocalDate.now().plusDays(-1).toString();
|
||||
}
|
||||
dateInfo.setUnit(unit);
|
||||
dateInfo.setPeriod(timeDefaultConfig.getPeriod());
|
||||
dateInfo.setStartDate(startDate);
|
||||
dateInfo.setEndDate(endDate);
|
||||
parseInfo.setDateInfo(dateInfo);
|
||||
int unit = timeDefaultConfig.getUnit();
|
||||
String startDate = LocalDate.now().plusDays(-unit).toString();
|
||||
String endDate = startDate;
|
||||
if (TimeMode.LAST.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
|
||||
} else if (TimeMode.RECENT.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.RECENT);
|
||||
endDate = LocalDate.now().plusDays(-1).toString();
|
||||
}
|
||||
dateInfo.setUnit(unit);
|
||||
dateInfo.setPeriod(timeDefaultConfig.getPeriod());
|
||||
dateInfo.setStartDate(startDate);
|
||||
dateInfo.setEndDate(endDate);
|
||||
parseInfo.setDateInfo(dateInfo);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,24 +1,24 @@
|
||||
package com.tencent.supersonic.headless.chat.query.rule.metric;
|
||||
|
||||
import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ENTITY;
|
||||
import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ID;
|
||||
import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.OptionType.REQUIRED;
|
||||
import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.RequireNumberType.AT_LEAST;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.Filter;
|
||||
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
|
||||
import com.tencent.supersonic.common.pojo.enums.FilterType;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.stereotype.Component;
|
||||
import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ENTITY;
|
||||
import static com.tencent.supersonic.headless.api.pojo.SchemaElementType.ID;
|
||||
import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.OptionType.REQUIRED;
|
||||
import static com.tencent.supersonic.headless.chat.query.rule.QueryMatchOption.RequireNumberType.AT_LEAST;
|
||||
|
||||
@Slf4j
|
||||
@Component
|
||||
|
||||
@@ -38,30 +38,35 @@ public abstract class MetricSemanticQuery extends RuleSemanticQuery {
|
||||
public void fillParseInfo(ChatQueryContext chatQueryContext) {
|
||||
super.fillParseInfo(chatQueryContext);
|
||||
parseInfo.setLimit(METRIC_MAX_RESULTS);
|
||||
if (parseInfo.getDateInfo() == null) {
|
||||
DataSetSchema dataSetSchema =
|
||||
chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(parseInfo.getDataSetId());
|
||||
TimeDefaultConfig timeDefaultConfig = dataSetSchema.getMetricTypeTimeDefaultConfig();
|
||||
DateConf dateInfo = new DateConf();
|
||||
//加上时间!=-1 判断
|
||||
if (Objects.nonNull(timeDefaultConfig) && Objects.nonNull(timeDefaultConfig.getUnit())
|
||||
&& timeDefaultConfig.getUnit() != -1) {
|
||||
int unit = timeDefaultConfig.getUnit();
|
||||
String startDate = LocalDate.now().plusDays(-unit).toString();
|
||||
String endDate = startDate;
|
||||
if (TimeMode.LAST.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
|
||||
} else if (TimeMode.RECENT.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.RECENT);
|
||||
endDate = LocalDate.now().plusDays(-1).toString();
|
||||
}
|
||||
dateInfo.setUnit(unit);
|
||||
dateInfo.setPeriod(timeDefaultConfig.getPeriod());
|
||||
dateInfo.setStartDate(startDate);
|
||||
dateInfo.setEndDate(endDate);
|
||||
// 时间不为-1才设置时间,所以移到这里
|
||||
parseInfo.setDateInfo(dateInfo);
|
||||
fillDateInfo(chatQueryContext);
|
||||
}
|
||||
|
||||
private void fillDateInfo(ChatQueryContext chatQueryContext) {
|
||||
if (parseInfo.getDateInfo() != null || !needFillDateConf(chatQueryContext)) {
|
||||
return;
|
||||
}
|
||||
DataSetSchema dataSetSchema =
|
||||
chatQueryContext.getSemanticSchema().getDataSetSchemaMap().get(parseInfo.getDataSetId());
|
||||
TimeDefaultConfig timeDefaultConfig = dataSetSchema.getMetricTypeTimeDefaultConfig();
|
||||
DateConf dateInfo = new DateConf();
|
||||
//加上时间!=-1 判断
|
||||
if (Objects.nonNull(timeDefaultConfig) && Objects.nonNull(timeDefaultConfig.getUnit())
|
||||
&& timeDefaultConfig.getUnit() != -1) {
|
||||
int unit = timeDefaultConfig.getUnit();
|
||||
String startDate = LocalDate.now().plusDays(-unit).toString();
|
||||
String endDate = startDate;
|
||||
if (TimeMode.LAST.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
|
||||
} else if (TimeMode.RECENT.equals(timeDefaultConfig.getTimeMode())) {
|
||||
dateInfo.setDateMode(DateConf.DateMode.RECENT);
|
||||
endDate = LocalDate.now().plusDays(-1).toString();
|
||||
}
|
||||
dateInfo.setUnit(unit);
|
||||
dateInfo.setPeriod(timeDefaultConfig.getPeriod());
|
||||
dateInfo.setStartDate(startDate);
|
||||
dateInfo.setEndDate(endDate);
|
||||
// 时间不为-1才设置时间,所以移到这里
|
||||
parseInfo.setDateInfo(dateInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,11 @@ import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
|
||||
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
|
||||
import com.tencent.supersonic.headless.chat.query.QueryManager;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
@@ -28,10 +33,6 @@ import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
@Slf4j
|
||||
public class QueryReqBuilder {
|
||||
@@ -88,9 +89,12 @@ public class QueryReqBuilder {
|
||||
}
|
||||
|
||||
private static DateConf rewrite2Between(DateConf dateInfo) {
|
||||
if (Objects.isNull(dateInfo)) {
|
||||
return null;
|
||||
}
|
||||
DateConf dateInfoNew = new DateConf();
|
||||
BeanUtils.copyProperties(dateInfo, dateInfoNew);
|
||||
if (Objects.nonNull(dateInfo) && DateConf.DateMode.RECENT.equals(dateInfo.getDateMode())) {
|
||||
if (DateConf.DateMode.RECENT.equals(dateInfo.getDateMode())) {
|
||||
int unit = dateInfo.getUnit();
|
||||
int days = 1;
|
||||
switch (dateInfo.getPeriod()) {
|
||||
@@ -222,7 +226,7 @@ public class QueryReqBuilder {
|
||||
}
|
||||
|
||||
public static Set<Order> getOrder(Set<Order> existingOrders,
|
||||
AggregateTypeEnum aggregator, SchemaElement metric) {
|
||||
AggregateTypeEnum aggregator, SchemaElement metric) {
|
||||
if (existingOrders != null && !existingOrders.isEmpty()) {
|
||||
return existingOrders;
|
||||
}
|
||||
@@ -259,7 +263,7 @@ public class QueryReqBuilder {
|
||||
}
|
||||
|
||||
public static QueryStructReq buildStructRatioReq(SemanticParseInfo parseInfo, SchemaElement metric,
|
||||
AggOperatorEnum aggOperatorEnum) {
|
||||
AggOperatorEnum aggOperatorEnum) {
|
||||
QueryStructReq queryStructReq = buildStructReq(parseInfo);
|
||||
queryStructReq.setQueryType(QueryType.METRIC);
|
||||
queryStructReq.setOrders(new ArrayList<>());
|
||||
|
||||
@@ -188,7 +188,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
|
||||
|
||||
return queryResp;
|
||||
} catch (Exception e) {
|
||||
log.error("exception in queryByStruct, e: ", e);
|
||||
log.error("exception in queryByReq:{}, e: ", queryReq, e);
|
||||
state = TaskStatusEnum.ERROR;
|
||||
throw e;
|
||||
} finally {
|
||||
@@ -205,8 +205,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
|
||||
List<String> dimensionValues = getDimensionValuesFromDict(dimensionValueReq, dataSetIds);
|
||||
// if the search results is null,search dimensionValue from database
|
||||
if (CollectionUtils.isEmpty(dimensionValues)) {
|
||||
semanticQueryResp = getDimensionValuesFromDb(dimensionValueReq, user);
|
||||
return semanticQueryResp;
|
||||
return getDimensionValuesFromDb(dimensionValueReq, user);
|
||||
}
|
||||
List<QueryColumn> columns = new ArrayList<>();
|
||||
QueryColumn queryColumn = new QueryColumn();
|
||||
@@ -501,20 +500,23 @@ public class S2SemanticLayerService implements SemanticLayerService {
|
||||
semanticParseInfo.setQueryType(QueryType.DETAIL);
|
||||
semanticParseInfo.setMetrics(getMetrics(entityInfo));
|
||||
semanticParseInfo.setDimensions(getDimensions(entityInfo));
|
||||
DateConf dateInfo = new DateConf();
|
||||
int unit = 1;
|
||||
TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
|
||||
if (Objects.nonNull(timeDefaultConfig)) {
|
||||
unit = timeDefaultConfig.getUnit();
|
||||
String date = LocalDate.now().plusDays(-unit).toString();
|
||||
dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
|
||||
dateInfo.setStartDate(date);
|
||||
dateInfo.setEndDate(date);
|
||||
} else {
|
||||
dateInfo.setUnit(unit);
|
||||
dateInfo.setDateMode(DateConf.DateMode.RECENT);
|
||||
|
||||
if (dataSetSchema.containsPartitionDimensions()) {
|
||||
DateConf dateInfo = new DateConf();
|
||||
int unit = 1;
|
||||
TimeDefaultConfig timeDefaultConfig = dataSetSchema.getTagTypeTimeDefaultConfig();
|
||||
if (Objects.nonNull(timeDefaultConfig)) {
|
||||
unit = timeDefaultConfig.getUnit();
|
||||
String date = LocalDate.now().plusDays(-unit).toString();
|
||||
dateInfo.setDateMode(DateConf.DateMode.BETWEEN);
|
||||
dateInfo.setStartDate(date);
|
||||
dateInfo.setEndDate(date);
|
||||
} else {
|
||||
dateInfo.setUnit(unit);
|
||||
dateInfo.setDateMode(DateConf.DateMode.RECENT);
|
||||
}
|
||||
semanticParseInfo.setDateInfo(dateInfo);
|
||||
}
|
||||
semanticParseInfo.setDateInfo(dateInfo);
|
||||
|
||||
//add filter
|
||||
QueryFilter chatFilter = getQueryFilter(entityInfo);
|
||||
@@ -524,8 +526,8 @@ public class S2SemanticLayerService implements SemanticLayerService {
|
||||
|
||||
SemanticQueryResp queryResultWithColumns = null;
|
||||
try {
|
||||
QueryStructReq queryStructReq = QueryReqBuilder.buildStructReq(semanticParseInfo);
|
||||
queryResultWithColumns = queryByReq(queryStructReq, user);
|
||||
QuerySqlReq querySqlReq = QueryReqBuilder.buildStructReq(semanticParseInfo).convert();
|
||||
queryResultWithColumns = queryByReq(querySqlReq, user);
|
||||
} catch (Exception e) {
|
||||
log.warn("setMainModel queryByStruct error, e:", e);
|
||||
}
|
||||
|
||||
@@ -116,7 +116,7 @@ public class ParseInfoProcessor implements ResultProcessor {
|
||||
QueryFilter dimensionFilter = new QueryFilter();
|
||||
dimensionFilter.setValue(expression.getFieldValue());
|
||||
SchemaElement schemaElement = fieldNameToElement.get(expression.getFieldName());
|
||||
if (Objects.isNull(schemaElement)) {
|
||||
if (Objects.isNull(schemaElement) || schemaElement.containsPartitionTime()) {
|
||||
continue;
|
||||
}
|
||||
dimensionFilter.setName(schemaElement.getName());
|
||||
@@ -167,7 +167,7 @@ public class ParseInfoProcessor implements ResultProcessor {
|
||||
}
|
||||
|
||||
private boolean containOperators(FieldExpression expression, FilterOperatorEnum firstOperator,
|
||||
FilterOperatorEnum... operatorEnums) {
|
||||
FilterOperatorEnum... operatorEnums) {
|
||||
return (Arrays.asList(operatorEnums).contains(firstOperator) && Objects.nonNull(
|
||||
expression.getFieldValue()));
|
||||
}
|
||||
|
||||
@@ -13,7 +13,6 @@ import com.tencent.supersonic.headless.chat.mapper.SchemaMapper;
|
||||
import com.tencent.supersonic.headless.chat.parser.SemanticParser;
|
||||
import com.tencent.supersonic.headless.chat.query.QueryManager;
|
||||
import com.tencent.supersonic.headless.chat.query.SemanticQuery;
|
||||
import com.tencent.supersonic.headless.chat.query.rule.RuleSemanticQuery;
|
||||
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
|
||||
import com.tencent.supersonic.headless.server.processor.ResultProcessor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@@ -108,9 +107,6 @@ public class ChatWorkflowEngine {
|
||||
List<SemanticQuery> candidateQueries = queryCtx.getCandidateQueries();
|
||||
if (CollectionUtils.isNotEmpty(candidateQueries)) {
|
||||
for (SemanticQuery semanticQuery : candidateQueries) {
|
||||
if (semanticQuery instanceof RuleSemanticQuery) {
|
||||
continue;
|
||||
}
|
||||
for (SemanticCorrector corrector : semanticCorrectors) {
|
||||
corrector.correct(queryCtx, semanticQuery.getParseInfo());
|
||||
if (!ChatWorkflowState.CORRECTING.equals(queryCtx.getChatWorkflowState())) {
|
||||
|
||||
@@ -383,7 +383,6 @@ public class DictUtils {
|
||||
fillStructDateBetween(queryStructReq, model, config.getDateConf().getUnit() - 1, 0);
|
||||
return;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
private void fillStructDateBetween(QueryStructReq queryStructReq, ModelResp model,
|
||||
|
||||
@@ -15,7 +15,6 @@ import com.tencent.supersonic.headless.api.pojo.DataSetDetail;
|
||||
import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig;
|
||||
import com.tencent.supersonic.headless.api.pojo.DefaultDisplayInfo;
|
||||
import com.tencent.supersonic.headless.api.pojo.Dim;
|
||||
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
|
||||
import com.tencent.supersonic.headless.api.pojo.Identify;
|
||||
import com.tencent.supersonic.headless.api.pojo.Measure;
|
||||
import com.tencent.supersonic.headless.api.pojo.MetricTypeDefaultConfig;
|
||||
@@ -119,9 +118,6 @@ public class S2ArtistDemo extends S2BaseDemo {
|
||||
modelDetail.setIdentifiers(identifiers);
|
||||
|
||||
List<Dim> dimensions = new ArrayList<>();
|
||||
Dim dimension1 = new Dim("", "imp_date", DimensionType.time.name(), 0);
|
||||
dimension1.setTypeParams(new DimensionTimeTypeParams());
|
||||
dimensions.add(dimension1);
|
||||
dimensions.add(new Dim("活跃区域", "act_area",
|
||||
DimensionType.categorical.name(), 1, 1));
|
||||
dimensions.add(new Dim("代表作", "song_name",
|
||||
@@ -135,7 +131,7 @@ public class S2ArtistDemo extends S2BaseDemo {
|
||||
Measure measure3 = new Measure("收藏量", "favor_cnt", "sum", 1);
|
||||
modelDetail.setMeasures(Lists.newArrayList(measure1, measure2, measure3));
|
||||
modelDetail.setQueryType("sql_query");
|
||||
modelDetail.setSqlQuery("select imp_date, singer_name, act_area, song_name, genre, "
|
||||
modelDetail.setSqlQuery("select singer_name, act_area, song_name, genre, "
|
||||
+ "js_play_cnt, down_cnt, favor_cnt from singer");
|
||||
modelReq.setModelDetail(modelDetail);
|
||||
return modelService.createModel(modelReq, user);
|
||||
|
||||
@@ -365,4 +365,5 @@ alter table s2_chat_memory add `side_info` TEXT DEFAULT NULL COMMENT '辅助信
|
||||
alter table s2_chat_parse modify column `chat_id` int(11);
|
||||
|
||||
--20240806
|
||||
UPDATE `s2_dimension` SET `type` = 'identify' WHERE `type` in ('primary','foreign');
|
||||
UPDATE `s2_dimension` SET `type` = 'identify' WHERE `type` in ('primary','foreign');
|
||||
alter table singer drop column imp_date;
|
||||
@@ -17,53 +17,12 @@ MERGE INTO s2_canvas(`id`, `domain_id`, `type`, `config` ,`created_at` ,`create
|
||||
values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin');
|
||||
|
||||
-- sample data
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
|
||||
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
|
||||
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
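The statements above seed the demo singer table in two ways: dated rows whose imp_date rolls with CURRENT_DATE() via DATEADD, and the six date-less rows just above, which carry no imp_date at all. A minimal sketch of how the two kinds of sample rows can be exercised on H2 (the queries are illustrative only, using the columns from the singer DDL later in this diff; they are not part of the change itself):

-- dated sample rows for one singer, most recent first
SELECT imp_date, singer_name, js_play_cnt FROM singer
WHERE singer_name = '周杰伦' ORDER BY imp_date DESC;

-- an aggregate that ignores time entirely
SELECT singer_name, SUM(js_play_cnt) AS total_js_play_cnt
FROM singer GROUP BY singer_name;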
---demo data for semantic and chat
MERGE INTO s2_user_department (user_name, department) values ('jack','HR');
@@ -20,136 +20,23 @@ insert into s2_canvas(`id`, `domain_id`, `type`, `config` ,`created_at` ,`creat
values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin');
-- sample data
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '周杰伦', '港台', '青花瓷', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '陈奕迅', '港台', '爱情转移', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '林俊杰', '港台', '美人鱼', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '张碧晨', '内地', '光的方向', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), '程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 5 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 3 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 2 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 6 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (imp_date, singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES (DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), 'Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('程响', '内地', '人间烟火', '国风', 1000000, 1000000, 1000000);
INSERT INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('Taylor Swift', '欧美', 'Love Story', '流行', 1000000, 1000000, 1000000);
-- demo data for semantic and chat
insert into s2_user_department (user_name, department) values ('jack','HR');
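As a side note, this MySQL-flavoured demo file seeds the same rolling date window as the H2 scripts elsewhere in the diff; only the date arithmetic differs: DATE_SUB(CURRENT_DATE(), INTERVAL n DAY) here versus DATEADD('DAY', -n, CURRENT_DATE()) in the H2 files. Shown purely for comparison, both of the following evaluate to yesterday's date on their respective engines:

-- MySQL
SELECT DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY);
-- H2
SELECT DATEADD('DAY', -1, CURRENT_DATE());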
@@ -413,7 +413,6 @@ CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info';

CREATE TABLE IF NOT EXISTS `singer` (
`imp_date` varchar(200) NOT NULL,
`singer_name` varchar(200) NOT NULL,
`act_area` varchar(200) NOT NULL,
`song_name` varchar(200) NOT NULL,
@@ -421,7 +420,7 @@ CREATE TABLE IF NOT EXISTS `singer` (
`js_play_cnt` bigINT DEFAULT NULL,
`down_cnt` bigINT DEFAULT NULL,
`favor_cnt` bigINT DEFAULT NULL,
PRIMARY KEY (`imp_date`, `singer_name`)
PRIMARY KEY (`singer_name`)
);
COMMENT ON TABLE singer IS 'singer_info';
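In this hunk the singer primary key appears to move from (`imp_date`, `singer_name`) to `singer_name` alone, which lines up with the date-less sample rows seeded earlier in the diff. A minimal sketch of the resulting H2 MERGE behaviour (illustrative values only, not part of the change): with singer_name as the sole key, a MERGE that omits imp_date updates the singer's single row instead of stacking another dated row.

MERGE INTO singer (singer_name, act_area, song_name, genre, js_play_cnt, down_cnt, favor_cnt)
VALUES ('周杰伦', '港台', '青花瓷', '国风', 2000000, 2000000, 2000000);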
@@ -17,7 +17,6 @@ CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

CREATE TABLE IF NOT EXISTS `singer` (
`imp_date` varchar(200) NOT NULL,
`singer_name` varchar(200) NOT NULL,
`act_area` varchar(200) NOT NULL,
`song_name` varchar(200) NOT NULL,
@@ -1,13 +1,12 @@
package com.tencent.supersonic.chat;

import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.chat.api.pojo.response.QueryResult;
import com.tencent.supersonic.common.pojo.enums.AggregateTypeEnum;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.request.QueryFilter;
import com.tencent.supersonic.chat.api.pojo.response.QueryResult;
import com.tencent.supersonic.headless.chat.query.rule.detail.DetailFilterQuery;
import com.tencent.supersonic.util.DataUtils;
import lombok.extern.slf4j.Slf4j;
@@ -48,7 +47,6 @@ public class TagTest extends BaseTest {
expectedParseInfo.getDimensions().add(dim3);
expectedParseInfo.getDimensions().add(dim4);

expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.BETWEEN, startDay, startDay, 7));
expectedParseInfo.setQueryType(QueryType.DETAIL);

assertQueryResult(expectedResult, actualResult);
@@ -1,9 +1,10 @@
-- sample user
MERGE INTO s2_user (id, `name`, password, salt, display_name, email, is_admin) values (1, 'admin','c3VwZXJzb25pY0BiaWNvbTD12g9wGXESwL7+o7xUW90=','jGl25bVBBBW96Qi9Te4V3w==','admin','admin@xx.com', 1);
MERGE INTO s2_user (id, `name`, password, display_name, email) values (2, 'jack','123456','jack','jack@xx.com');
MERGE INTO s2_user (id, `name`, password, display_name, email) values (3, 'tom','123456','tom','tom@xx.com');
MERGE INTO s2_user (id, `name`, password, display_name, email, is_admin) values (4, 'lucy','123456','lucy','lucy@xx.com', 1);
MERGE INTO s2_user (id, `name`, password, display_name, email) values (5, 'alice','123456','alice','alice@xx.com');
---The default value for the password is 123456
MERGE INTO s2_user (id, `name`, password, salt, display_name, email, is_admin) values (1, 'admin','c3VwZXJzb25pY0BiaWNvbdktJJYWw6A3rEmBUPzbn/6DNeYnD+y3mAwDKEMS3KVT','jGl25bVBBBW96Qi9Te4V3w==','admin','admin@xx.com', 1);
MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (2, 'jack','c3VwZXJzb25pY0BiaWNvbWxGalmwa0h/trkh/3CWOYMDiku0Op1VmOfESIKmN0HG','MWERWefm/3hD6kYndF6JIg==','jack','jack@xx.com');
MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (3, 'tom','c3VwZXJzb25pY0BiaWNvbVWv0CZ6HzeX8GRUpw0C8NSaQ+0hE/dAcmzRpCFwAqxK','4WCPdcXXgT89QDHLML+3hg==','tom','tom@xx.com');
MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (4, 'lucy','c3VwZXJzb25pY0BiaWNvbc7Ychfu99lPL7rLmCkf/vgF4RASa4Z++Mxo1qlDCpci','3Jnpqob6uDoGLP9eCAg5Fw==','lucy','lucy@xx.com');
MERGE INTO s2_user (id, `name`, password, salt, display_name, email) values (5, 'alice','c3VwZXJzb25pY0BiaWNvbe9Z4F2/DVIfAJoN1HwUTuH1KgVuiusvfh7KkWYQSNHk','K9gGyX8OAK8aH8Myj6djqQ==','alice','alice@xx.com');
MERGE INTO s2_available_date_info(`id`,`item_id` ,`type` ,`date_format` ,`start_date` ,`end_date` ,`unavailable_date` ,`created_at` ,`created_by` ,`updated_at` ,`updated_by` )
values (1 , 1, 'dimension', 'yyyy-MM-dd', DATEADD('DAY', -28, CURRENT_DATE()), DATEADD('DAY', -1, CURRENT_DATE()), '[]', '2023-06-01', 'admin', '2023-06-01', 'admin');
@@ -16,53 +17,12 @@ MERGE INTO s2_canvas(`id`, `domain_id`, `type`, `config` ,`created_at` ,`create
values (1, 1, 'modelEdgeRelation', '[{"source":"datasource-1","target":"datasource-3","type":"polyline","id":"edge-0.305251275235679741702883718912","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-94,"y":-137.5,"anchorIndex":0,"id":"-94|||-137.5"},"endPoint":{"x":-234,"y":-45,"anchorIndex":1,"id":"-234|||-45"},"sourceAnchor":2,"targetAnchor":1,"label":"模型关系编辑"},{"source":"datasource-1","target":"datasource-2","type":"polyline","id":"edge-0.466237264629309141702883756359","style":{"active":{"stroke":"rgb(95, 149, 255)","lineWidth":1},"selected":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"shadowColor":"rgb(95, 149, 255)","shadowBlur":10,"text-shape":{"fontWeight":500}},"highlight":{"stroke":"rgb(95, 149, 255)","lineWidth":2,"text-shape":{"fontWeight":500}},"inactive":{"stroke":"rgb(234, 234, 234)","lineWidth":1},"disable":{"stroke":"rgb(245, 245, 245)","lineWidth":1},"stroke":"#296df3","endArrow":true},"startPoint":{"x":-12,"y":-137.5,"anchorIndex":1,"id":"-12|||-137.5"},"endPoint":{"x":85,"y":31.5,"anchorIndex":0,"id":"85|||31.5"},"sourceAnchor":1,"targetAnchor":2,"label":"模型关系编辑"}]', '2023-06-01', 'admin', '2023-06-01', 'admin');
-- sample data
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), '程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -1, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -2, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -6, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (imp_date,singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES (DATEADD('DAY', -7, CURRENT_DATE()), 'Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('周杰伦', '港台','青花瓷','国风',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('陈奕迅', '港台','爱情转移','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('林俊杰', '港台','美人鱼','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('张碧晨', '内地','光的方向','流行',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('程响', '内地','人间烟火','国风',1000000,1000000,1000000);
MERGE INTO singer (singer_name,act_area, song_name,genre,js_play_cnt,down_cnt,favor_cnt) VALUES ('Taylor Swift', '欧美','Love Story','流行',1000000,1000000,1000000);
---demo data for semantic and chat
MERGE INTO s2_user_department (user_name, department) values ('jack','HR');
@@ -74,7 +34,17 @@ MERGE INTO s2_user_department (user_name, department) values ('john','strategy')
MERGE INTO s2_user_department (user_name, department) values ('alice','sales');
MERGE INTO s2_user_department (user_name, department) values ('dean','marketing');

INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'jack', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p4');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p2');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'alice', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p2');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'john', 'p3');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'tom', 'p3');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (CURRENT_DATE(), 'dean', 'p4');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -5, CURRENT_DATE()), 'lucy', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -4, CURRENT_DATE()), 'jack', 'p1');
INSERT INTO s2_pv_uv_statis (imp_date, user_name, page) VALUES (DATEADD('DAY', -3, CURRENT_DATE()), 'lucy', 'p4');
@@ -1090,12 +1060,12 @@ MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('现代',8,'孟加拉国
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('蓝调',7,'加拿大');
MERGE INTO genre(g_name,rating,most_popular_in) VALUES ('流行',9,'美国');

MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Shrikanta','印度','男性','tagore');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Prity','孟加拉国','女性','nazrul');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Farida','孟加拉国','女性','民间');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Topu','印度','女性','现代');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Enrique','美国','男性','蓝调');
MERGE INTO artist(artist_name,country,gender,g_name) VALUES ('Michel','英国','男性','流行');
MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Shrikanta','印度','男性','tagore');
MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Prity','孟加拉国','女性','nazrul');
MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Farida','孟加拉国','女性','民间');
MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Topu','印度','女性','现代');
MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Enrique','美国','男性','蓝调');
MERGE INTO artist(artist_name,citizenship,gender,g_name) VALUES ('Michel','英国','男性','流行');

MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (1,'Shrikanta','3.78 MB','3:45','mp4');
MERGE INTO files(f_id,artist_name,file_size,duration,formats) VALUES (2,'Prity','4.12 MB','2:56','mp3');
@@ -88,8 +88,8 @@ CREATE TABLE IF NOT EXISTS `s2_chat_memory` (
`question` varchar(655) ,
`agent_id` INT ,
`db_schema` TEXT ,
`side_info` TEXT ,
`s2_sql` TEXT ,
`side_info` TEXT ,
`status` char(10) ,
`llm_review` char(10) ,
`llm_comment` TEXT,
@@ -271,39 +271,39 @@ COMMENT ON TABLE s2_canvas IS 'canvas table';

CREATE TABLE IF NOT EXISTS `s2_query_stat_info` (
`id` INT NOT NULL AUTO_INCREMENT,
`trace_id` varchar(200) DEFAULT NULL, -- query unique identifier
`model_id` INT DEFAULT NULL,
`data_set_id` INT DEFAULT NULL,
`user` varchar(200) DEFAULT NULL,
`created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ,
`query_type` varchar(200) DEFAULT NULL, -- the corresponding scene
`query_type_back` INT DEFAULT '0' , -- query type, 0-normal query, 1-pre-refresh type
`query_sql_cmd`LONGVARCHAR , -- sql type request parameter
`sql_cmd_md5` varchar(200) DEFAULT NULL, -- sql type request parameter md5
`query_struct_cmd`LONGVARCHAR , -- struct type request parameter
`struct_cmd_md5` varchar(200) DEFAULT NULL, -- struct type request parameter md5值
`sql`LONGVARCHAR ,
`sql_md5` varchar(200) DEFAULT NULL, -- sql md5
`query_engine` varchar(20) DEFAULT NULL,
`elapsed_ms` bigINT DEFAULT NULL,
`query_state` varchar(20) DEFAULT NULL,
`native_query` INT DEFAULT NULL, -- 1-detail query, 0-aggregation query
`start_date` varchar(50) DEFAULT NULL,
`end_date` varchar(50) DEFAULT NULL,
`dimensions`LONGVARCHAR , -- dimensions involved in sql
`metrics`LONGVARCHAR , -- metric involved in sql
`select_cols`LONGVARCHAR ,
`agg_cols`LONGVARCHAR ,
`filter_cols`LONGVARCHAR ,
`group_by_cols`LONGVARCHAR ,
`order_by_cols`LONGVARCHAR ,
`use_result_cache` TINYINT DEFAULT '-1' , -- whether to hit the result cache
`use_sql_cache` TINYINT DEFAULT '-1' , -- whether to hit the sql cache
`sql_cache_key`LONGVARCHAR , -- sql cache key
`result_cache_key`LONGVARCHAR , -- result cache key
`query_opt_mode` varchar(50) DEFAULT NULL ,
PRIMARY KEY (`id`)
`id` INT NOT NULL AUTO_INCREMENT,
`trace_id` varchar(200) DEFAULT NULL, -- query unique identifier
`model_id` INT DEFAULT NULL,
`data_set_id` INT DEFAULT NULL,
`user` varchar(200) DEFAULT NULL,
`created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ,
`query_type` varchar(200) DEFAULT NULL, -- the corresponding scene
`query_type_back` INT DEFAULT '0' , -- query type, 0-normal query, 1-pre-refresh type
`query_sql_cmd`LONGVARCHAR , -- sql type request parameter
`sql_cmd_md5` varchar(200) DEFAULT NULL, -- sql type request parameter md5
`query_struct_cmd`LONGVARCHAR , -- struct type request parameter
`struct_cmd_md5` varchar(200) DEFAULT NULL, -- struct type request parameter md5值
`sql`LONGVARCHAR ,
`sql_md5` varchar(200) DEFAULT NULL, -- sql md5
`query_engine` varchar(20) DEFAULT NULL,
`elapsed_ms` bigINT DEFAULT NULL,
`query_state` varchar(20) DEFAULT NULL,
`native_query` INT DEFAULT NULL, -- 1-detail query, 0-aggregation query
`start_date` varchar(50) DEFAULT NULL,
`end_date` varchar(50) DEFAULT NULL,
`dimensions`LONGVARCHAR , -- dimensions involved in sql
`metrics`LONGVARCHAR , -- metric involved in sql
`select_cols`LONGVARCHAR ,
`agg_cols`LONGVARCHAR ,
`filter_cols`LONGVARCHAR ,
`group_by_cols`LONGVARCHAR ,
`order_by_cols`LONGVARCHAR ,
`use_result_cache` TINYINT DEFAULT '-1' , -- whether to hit the result cache
`use_sql_cache` TINYINT DEFAULT '-1' , -- whether to hit the sql cache
`sql_cache_key`LONGVARCHAR , -- sql cache key
`result_cache_key`LONGVARCHAR , -- result cache key
`query_opt_mode` varchar(50) DEFAULT NULL ,
PRIMARY KEY (`id`)
) ;
COMMENT ON TABLE s2_query_stat_info IS 'query statistics table';
@@ -386,7 +386,7 @@ CREATE TABLE IF NOT EXISTS s2_agent
enable_search int null,
enable_memory_review int null,
PRIMARY KEY (`id`)
); COMMENT ON TABLE s2_agent IS 'agent information table';
); COMMENT ON TABLE s2_agent IS 'agent information table';
-------demo for semantic and chat
@@ -413,7 +413,6 @@ CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info';

CREATE TABLE IF NOT EXISTS `singer` (
`imp_date` varchar(200) NOT NULL,
`singer_name` varchar(200) NOT NULL,
`act_area` varchar(200) NOT NULL,
`song_name` varchar(200) NOT NULL,
@@ -421,7 +420,7 @@ CREATE TABLE IF NOT EXISTS `singer` (
`js_play_cnt` bigINT DEFAULT NULL,
`down_cnt` bigINT DEFAULT NULL,
`favor_cnt` bigINT DEFAULT NULL,
PRIMARY KEY (`imp_date`, `singer_name`)
PRIMARY KEY (`singer_name`)
);
COMMENT ON TABLE singer IS 'singer_info';
@@ -466,10 +465,10 @@ COMMENT ON TABLE genre IS 'genre';

CREATE TABLE IF NOT EXISTS `artist` (
`artist_name` varchar(50) NOT NULL , -- genre name
`country` varchar(20) ,
`citizenship` varchar(20) ,
`gender` varchar(20) ,
`g_name` varchar(50),
PRIMARY KEY (`artist_name`,`country`)
PRIMARY KEY (`artist_name`,`citizenship`)
);
COMMENT ON TABLE artist IS 'artist';

@@ -670,4 +669,4 @@ CREATE TABLE IF NOT EXISTS `s2_term` (
`updated_by` varchar(100) DEFAULT NULL ,
PRIMARY KEY (`id`)
);
COMMENT ON TABLE s2_term IS 'term info';
COMMENT ON TABLE s2_term IS 'term info';