Merge remote-tracking branch 'origin/master'

This commit is contained in:
jerryjzhang
2024-08-10 18:28:04 +08:00
11 changed files with 87 additions and 42 deletions

View File

@@ -1,5 +1,4 @@
name: supersonic CentOS CI
on:
push:
branches:
@@ -16,7 +15,7 @@ jobs:
strategy:
matrix:
- java-version: [8, 11] # 定义要测试的JDK版本
+ java-version: [8, 11, 21] # 定义要测试的JDK版本
steps:
- uses: actions/checkout@v2
@@ -36,10 +35,14 @@ jobs:
for i in {1..5}; do
dnf install -y java-1.8.0-openjdk-devel maven && break || sleep 15
done
- else
+ elif [ ${{ matrix.java-version }} -eq 11 ]; then
  for i in {1..5}; do
  dnf install -y java-11-openjdk-devel maven && break || sleep 15
  done
+ elif [ ${{ matrix.java-version }} -eq 21 ]; then
+ for i in {1..5}; do
+ dnf install -y java-21-openjdk-devel maven && break || sleep 15
+ done
fi
- name: Verify Java and Maven installation

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
- java-version: [8, 11] # Define the JDK versions to test
+ java-version: [8, 11, 21] # Define the JDK versions to test
steps:
- uses: actions/checkout@v2

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
- java-version: [8, 11] # 定义要测试的JDK版本
+ java-version: [8, 11, 21] # 定义要测试的JDK版本
steps:
- uses: actions/checkout@v2

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
- java-version: [8, 11]
+ java-version: [8, 11, 21] # Add JDK 21 to the matrix
steps:
- uses: actions/checkout@v2
@@ -23,7 +23,7 @@ jobs:
uses: actions/setup-java@v2
with:
java-version: ${{ matrix.java-version }}
- distribution: 'adopt'
+ distribution: 'adopt' # You might need to change this if 'adopt' doesn't support JDK 21
- name: Cache Maven packages
uses: actions/cache@v2

View File

@@ -12,10 +12,13 @@ public class ModelCluster {
private static final String split = "_";
private Set<Long> modelIds = new LinkedHashSet<>();
private String key;
- public static ModelCluster build(Set<Long> modelIds) {
+ private boolean containsPartitionDimensions;
+ public static ModelCluster build(Set<Long> modelIds, Boolean containsPartitionDimensions) {
ModelCluster modelCluster = new ModelCluster();
modelCluster.setModelIds(modelIds);
modelCluster.setKey(StringUtils.join(modelIds, split));
+ modelCluster.setContainsPartitionDimensions(containsPartitionDimensions);
return modelCluster;
}
}

View File

@@ -13,6 +13,7 @@ import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DataEvent;
import com.tencent.supersonic.common.pojo.DataItem;
+ import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.common.pojo.enums.EventType;
@@ -750,6 +751,9 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
if (modelCluster == null) {
throw new IllegalArgumentException("Invalid input parameters, unable to obtain valid metrics");
}
+ if (!modelCluster.isContainsPartitionDimensions()) {
+ queryMetricReq.setDateInfo(null);
+ }
//4. set groups
List<String> dimensionBizNames = dimensionResps.stream()
.filter(entry -> modelCluster.getModelIds().contains(entry.getModelId()))
@@ -759,8 +763,9 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
.map(SchemaItem::getBizName).collect(Collectors.toList());
QueryStructReq queryStructReq = new QueryStructReq();
- if (queryMetricReq.getDateInfo().isGroupByDate()) {
- queryStructReq.getGroups().add(queryMetricReq.getDateInfo().getGroupByTimeDimension());
+ DateConf dateInfo = queryMetricReq.getDateInfo();
+ if (Objects.nonNull(dateInfo) && dateInfo.isGroupByDate()) {
+ queryStructReq.getGroups().add(dateInfo.getGroupByTimeDimension());
}
if (!CollectionUtils.isEmpty(dimensionBizNames)) {
queryStructReq.getGroups().addAll(dimensionBizNames);
@@ -795,7 +800,7 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
}
queryStructReq.setDimensionFilters(filters);
//7. set dateInfo
- queryStructReq.setDateInfo(queryMetricReq.getDateInfo());
+ queryStructReq.setDateInfo(dateInfo);
return queryStructReq;
}

View File

@@ -5,10 +5,13 @@ import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.response.ModelSchemaResp;
import com.tencent.supersonic.headless.server.pojo.ModelCluster;
import com.tencent.supersonic.headless.server.service.SchemaService;
+ import org.apache.commons.collections.CollectionUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+ import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -29,10 +32,21 @@ public class ModelClusterBuilder {
modelClusters.add(modelCluster);
}
}
- return modelClusters.stream().map(ModelCluster::build)
+ return modelClusters.stream()
+ .map(modelCluster -> getModelCluster(modelIdToModelSchema, modelCluster))
.collect(Collectors.toMap(ModelCluster::getKey, value -> value, (k1, k2) -> k1));
}
+ private static ModelCluster getModelCluster(Map<Long, ModelSchemaResp> modelIdToModelSchema, Set<Long> modelIds) {
+ boolean containsPartitionDimensions = modelIds.stream()
+ .map(modelIdToModelSchema::get)
+ .filter(Objects::nonNull)
+ .anyMatch(modelSchemaResp -> CollectionUtils.isNotEmpty(modelSchemaResp.getTimeDimension()));
+ return ModelCluster.build(modelIds, containsPartitionDimensions);
+ }
private static void dfs(ModelSchemaResp model, Map<Long, ModelSchemaResp> modelMap,
Set<Long> visited, Set<Long> modelCluster) {
visited.add(model.getId());

12
pom.xml
View File

@@ -36,7 +36,7 @@
<pagehelper.version>6.1.0</pagehelper.version>
<pagehelper.spring.version>2.1.0</pagehelper.spring.version>
<mybatis.version>3.5.3</mybatis.version>
- <lombok.version>1.18.20</lombok.version>
+ <lombok.version>1.18.30</lombok.version>
<guava.version>32.0.0-jre</guava.version>
<hanlp.version>portable-1.8.3</hanlp.version>
<hadoop.version>2.7.2</hadoop.version>
@@ -305,6 +305,16 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>2.22.2</version>
+ <configuration>
+ <systemPropertyVariables>
+ <net.bytebuddy.experimental>true</net.bytebuddy.experimental>
+ </systemPropertyVariables>
+ </configuration>
+ </plugin>
</plugins>
</pluginManagement>
</build>

View File

@@ -252,6 +252,7 @@ const ParseTip: React.FC<Props> = ({
});
return (
<div className={`${prefixCls}-tip-item-filter-content`}>
+ {!!dateInfo && (
<div className={tipItemOptionClass}>
<span className={`${prefixCls}-tip-item-filter-name`}></span>
{nativeQuery ? (
@@ -280,6 +281,7 @@ const ParseTip: React.FC<Props> = ({
/>
)}
</div>
+ )}
{filters?.map((filter: any, index: number) => (
<FilterItem
modelId={modelId!}

View File

@@ -12,6 +12,7 @@ import { exportTextFile } from '../../utils/utils';
type Props = {
agentId?: number;
queryId: number;
+ question: string;
llmReq?: any;
llmResp?: any;
integrateSystem?: string;
@@ -23,6 +24,7 @@ type Props = {
const SqlItem: React.FC<Props> = ({
agentId,
queryId,
+ question,
llmReq,
llmResp,
integrateSystem,
@@ -126,6 +128,11 @@ ${format(sqlInfo.querySQL)}
const onExportLog = () => {
let text = '';
+ if (question) {
+ text += `
+ 问题:${question}
+ `;
+ }
if (llmReq) {
text += getSchemaMapText();
}

View File

@@ -353,6 +353,7 @@ const ChatItem: React.FC<Props> = ({
<SqlItem
agentId={agentId}
queryId={parseInfo.queryId}
+ question={msg}
llmReq={llmReq}
llmResp={llmResp}
integrateSystem={integrateSystem}