2 Commits

Author SHA1 Message Date
mislayming
bda4bdda77 Merge aaf2d46a56 into d1e5e8777a 2025-02-18 21:56:14 +08:00
jerryjzhang
d1e5e8777a (improvement)(chat)Try to find all fields in the same model to avoid unnecessary join.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run
2025-02-18 21:29:39 +08:00
9 changed files with 12800 additions and 15982 deletions

View File

@@ -10,6 +10,7 @@ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.QueryState;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
@@ -188,12 +189,10 @@ public class SqlQueryParser implements QueryParser {
String modelName = entry.getKey();
entry.getValue().forEach(m -> {
if (fields.contains(m.getName()) || fields.contains(m.getBizName())) {
if (!ontologyQuery.getMetricMap().containsKey(modelName)) {
ontologyQuery.getMetricMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getMetricMap().get(modelName).add(m);
ontologyQuery.getMetricMap().computeIfAbsent(modelName, k -> Sets.newHashSet())
.add(m);
fields.remove(m.getName());
fields.remove(m.getBizName());
}
@@ -207,33 +206,51 @@ public class SqlQueryParser implements QueryParser {
String modelName = entry.getKey();
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(entry.getKey())) {
ontologyQuery.getDimensionMap().put(entry.getKey(),
Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(entry.getKey()).add(d);
ontologyQuery.getDimensionMap()
.computeIfAbsent(modelName, k -> Sets.newHashSet()).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}
});
});
// if there are still fields not found belonging models, try to find in the models without
// querying metrics.
// second, try to find a model that has all the remaining fields, such that no further join
// is needed.
if (!fields.isEmpty()) {
Map<String, Set<DimSchemaResp>> model2dims = new HashMap<>();
ontology.getDimensionMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
model2dims.computeIfAbsent(modelName, k -> Sets.newHashSet()).add(d);
}
});
});
Optional<Map.Entry<String, Set<DimSchemaResp>>> modelEntry = model2dims.entrySet()
.stream().filter(entry -> entry.getValue().size() == fields.size()).findFirst();
if (modelEntry.isPresent()) {
ontologyQuery.getDimensionMap().put(modelEntry.get().getKey(),
modelEntry.get().getValue());
ontologyQuery.getModelMap().put(modelEntry.get().getKey(),
ontology.getModelMap().get(modelEntry.get().getKey()));
fields.clear();
}
}
// finally, if there are still fields that have not been matched to any model, try to find
// them in the remaining models iteratively
if (!fields.isEmpty()) {
ontology.getDimensionMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
ontologyQuery.getDimensionMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(modelName).add(d);
ontologyQuery.getDimensionMap()
.computeIfAbsent(modelName, k -> Sets.newHashSet()).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}

View File

@@ -209,6 +209,5 @@
},
"engines": {
"node": ">=16"
},
"packageManager": "pnpm@9.12.3+sha512.cce0f9de9c5a7c95bef944169cc5dfe8741abfb145078c0d508b868056848a87c81e626246cb60967cbd7fd29a6c062ef73ff840d96b3c86c40ac92cf4a813ee"
}
}
}

View File

@@ -9,7 +9,7 @@ import {
RangeValue,
SimilarQuestionType,
} from '../../common/type';
import { createContext, useEffect, useState } from 'react';
import { createContext, useEffect, useRef, useState } from 'react';
import { chatExecute, chatParse, queryData, deleteQuery, switchEntity } from '../../service';
import { PARSE_ERROR_TIP, PREFIX_CLS, SEARCH_EXCEPTION_TIP } from '../../common/constants';
import { message, Spin } from 'antd';
@@ -490,7 +490,9 @@ const ChatItem: React.FC<Props> = ({
onSwitchEntity={onSwitchEntity}
onFiltersChange={onFiltersChange}
onDateInfoChange={onDateInfoChange}
onRefresh={onRefresh}
onRefresh={() => {
onRefresh();
}}
handlePresetClick={handlePresetClick}
/>
)}

View File

@@ -40,12 +40,6 @@ const BarChart: React.FC<Props> = ({
}) => {
const chartRef = useRef<any>();
const instanceRef = useRef<ECharts>();
const { downloadChartAsImage } = useExportByEcharts({
instanceRef,
question,
});
const { register } = useContext(ChartItemContext);
const { queryColumns, queryResults, entityInfo } = data;
@@ -195,6 +189,13 @@ const BarChart: React.FC<Props> = ({
const prefixCls = `${PREFIX_CLS}-bar`;
const { downloadChartAsImage } = useExportByEcharts({
instanceRef,
question,
});
const { register } = useContext(ChartItemContext);
register('downloadChartAsImage', downloadChartAsImage);
return (

View File

@@ -93,9 +93,7 @@ export const getFormattedValue = (value: number | string, remainZero?: boolean)
export const formatNumberWithCN = (num: number) => {
if (isNaN(num)) return '-';
if (num >= 100000000) {
return (num / 100000000).toFixed(1) + '亿';
} else if (num >= 10000) {
if (num >= 10000) {
return (num / 10000).toFixed(1) + '万';
} else {
return formatByDecimalPlaces(num, 2);

View File

@@ -4,9 +4,5 @@ export default {
target: 'http://127.0.0.1:9080',
changeOrigin: true,
},
'/aibi/api/': {
target: 'http://127.0.0.1:9080',
changeOrigin: true,
},
},
};

View File

@@ -12,7 +12,6 @@ import { ISemantic } from '../../data';
import { ColumnsConfig } from '../../components/TableColumnRender';
import ViewSearchFormModal from './ViewSearchFormModal';
import { toDatasetEditPage } from '@/pages/SemanticModel/utils';
import UploadFile from './UploadFile';
type Props = {
// dataSetList: ISemantic.IDatasetItem[];
@@ -93,6 +92,9 @@ const DataSetTable: React.FC<Props> = ({ disabledEdit = false }) => {
<a
onClick={() => {
toDatasetEditPage(record.domainId, record.id, 'relation');
// setEditFormStep(1);
// setViewItem(record);
// setCreateDataSourceModalOpen(true);
}}
>
{name}
@@ -144,6 +146,9 @@ const DataSetTable: React.FC<Props> = ({ disabledEdit = false }) => {
key="metricEditBtn"
onClick={() => {
toDatasetEditPage(record.domainId, record.id);
// setEditFormStep(0);
// setViewItem(record);
// setCreateDataSourceModalOpen(true);
}}
>
@@ -184,12 +189,6 @@ const DataSetTable: React.FC<Props> = ({ disabledEdit = false }) => {
</Button>
)}
<UploadFile
key="uploadFile"
buttonType="link"
domainId={record.domainId}
datasetId={record.id}
/>
<Popconfirm
title="确认删除?"
okText="是"
@@ -230,13 +229,6 @@ const DataSetTable: React.FC<Props> = ({ disabledEdit = false }) => {
disabledEdit
? [<></>]
: [
<UploadFile
key="uploadFile"
domainId={selectDomainId}
onFileUploaded={() => {
queryDataSetList();
}}
/>,
<Button
key="create"
type="primary"

View File

@@ -1,44 +0,0 @@
import { getToken } from '@/utils/utils';
import { UploadOutlined } from '@ant-design/icons';
import type { UploadProps } from 'antd';
import { Button, message, Upload } from 'antd';
type Props = {
buttonType?: string;
domainId?: number;
datasetId?: string;
onFileUploaded?: () => void;
};
const UploadFile = ({ buttonType, domainId, datasetId, onFileUploaded }: Props) => {
const props: UploadProps = {
name: 'multipartFile',
action: `/aibi/api/data/file/uploadFileNew?type=DATASET&domainId=${domainId}${
datasetId ? `&dataSetId=${datasetId}` : ''
}`,
showUploadList: false,
onChange(info) {
if (info.file.status !== 'uploading') {
console.log(info.file, info.fileList);
}
if (info.file.status === 'done') {
message.success('导入成功');
onFileUploaded?.();
} else if (info.file.status === 'error') {
message.error('导入失败');
}
},
};
return (
<Upload {...props}>
{buttonType === 'link' ? (
<a></a>
) : (
<Button icon={<UploadOutlined />}></Button>
)}
</Upload>
);
};
export default UploadFile;

28639
webapp/pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff