(improvement)(Chat) Integration of tags into chat. (#798)

This commit is contained in:
lexluo09
2024-03-08 16:59:51 +08:00
committed by GitHub
parent 9d27031543
commit 3ad18b0ee0
20 changed files with 108 additions and 69 deletions

View File

@@ -19,6 +19,7 @@ public class DataSetSchema {
private Set<SchemaElement> dimensions = new HashSet<>();
private Set<SchemaElement> dimensionValues = new HashSet<>();
private Set<SchemaElement> tags = new HashSet<>();
private Set<SchemaElement> tagValues = new HashSet<>();
private SchemaElement entity = new SchemaElement();
private QueryConfig queryConfig;
@@ -44,34 +45,8 @@ public class DataSetSchema {
case TAG:
element = tags.stream().filter(e -> e.getId() == elementID).findFirst();
break;
default:
}
if (element.isPresent()) {
return element.get();
} else {
return null;
}
}
public SchemaElement getElement(SchemaElementType elementType, String name) {
Optional<SchemaElement> element = Optional.empty();
switch (elementType) {
case ENTITY:
element = Optional.ofNullable(entity);
break;
case DATASET:
element = Optional.of(dataSet);
break;
case METRIC:
element = metrics.stream().filter(e -> name.equals(e.getName())).findFirst();
break;
case DIMENSION:
element = dimensions.stream().filter(e -> name.equals(e.getName())).findFirst();
break;
case VALUE:
element = dimensionValues.stream().filter(e -> name.equals(e.getName())).findFirst();
case TAG_VALUE:
element = tagValues.stream().filter(e -> e.getId() == elementID).findFirst();
break;
default:
}

View File

@@ -46,6 +46,9 @@ public class SemanticSchema implements Serializable {
case TAG:
element = getElementsById(elementID, getTags());
break;
case TAG_VALUE:
element = getElementsById(elementID, getTagValues());
break;
default:
}
@@ -91,13 +94,21 @@ public class SemanticSchema implements Serializable {
}
public List<SchemaElement> getTags(Long dataSetId) {
List<SchemaElement> tags = getTags();
return getElementsByDataSetId(dataSetId, tags);
}
public List<SchemaElement> getTagValues() {
List<SchemaElement> tags = new ArrayList<>();
dataSetSchemaList.stream().filter(schemaElement ->
dataSetId.equals(schemaElement.getDataSet().getDataSet()))
.forEach(d -> tags.addAll(d.getTags()));
dataSetSchemaList.stream().forEach(d -> tags.addAll(d.getTagValues()));
return tags;
}
public List<SchemaElement> getTagValues(Long dataSetId) {
List<SchemaElement> tags = getTagValues();
return getElementsByDataSetId(dataSetId, tags);
}
public List<SchemaElement> getMetrics() {
List<SchemaElement> metrics = new ArrayList<>();
dataSetSchemaList.stream().forEach(d -> metrics.addAll(d.getMetrics()));

View File

@@ -90,7 +90,12 @@ public class EmbeddingMatchStrategy extends BaseMatchStrategy<EmbeddingResult> {
.map(retrieveQueryResult -> {
List<Retrieval> retrievals = retrieveQueryResult.getRetrieval();
if (CollectionUtils.isNotEmpty(retrievals)) {
retrievals.removeIf(retrieval -> retrieval.getDistance() > distance.doubleValue());
retrievals.removeIf(retrieval -> {
if (!retrieveQueryResult.getQuery().contains(retrieval.getQuery())) {
return retrieval.getDistance() > distance.doubleValue();
}
return false;
});
}
return retrieveQueryResult;
})

View File

@@ -73,7 +73,8 @@ public class KeywordMapper extends BaseMapper {
if (element == null) {
continue;
}
if (element.getType().equals(SchemaElementType.VALUE)) {
if (element.getType().equals(SchemaElementType.VALUE) || element.getType()
.equals(SchemaElementType.TAG_VALUE)) {
element.setName(hanlpMapResult.getName());
}
Long frequency = wordNatureToFrequency.get(hanlpMapResult.getName() + nature);

View File

@@ -64,7 +64,7 @@ public class SearchMatchStrategy extends BaseMatchStrategy<HanlpMapResult> {
// remove entity name where search
hanlpMapResults = hanlpMapResults.stream().filter(entry -> {
List<String> natures = entry.getNatures().stream()
.filter(nature -> !nature.endsWith(DictWordType.ENTITY.getType()))
.filter(nature -> !nature.endsWith(DictWordType.ENTITY.getTypeWithSpilt()))
.collect(Collectors.toList());
if (CollectionUtils.isEmpty(natures)) {
return false;

View File

@@ -51,6 +51,9 @@ public class DataSetSchemaBuilder {
Set<SchemaElement> tags = getTags(resp);
dataSetSchema.getTags().addAll(tags);
Set<SchemaElement> tagValues = getTagValues(resp);
dataSetSchema.getTagValues().addAll(tagValues);
SchemaElement entity = getEntity(resp);
if (Objects.nonNull(entity)) {
dataSetSchema.setEntity(entity);
@@ -91,6 +94,22 @@ public class DataSetSchemaBuilder {
return tags;
}
private static Set<SchemaElement> getTagValues(DataSetSchemaResp resp) {
Set<SchemaElement> dimensionValues = new HashSet<>();
for (TagResp tagResp : resp.getTags()) {
SchemaElement element = SchemaElement.builder()
.dataSet(resp.getId())
.model(tagResp.getModelId())
.id(tagResp.getId())
.name(tagResp.getName())
.bizName(tagResp.getBizName())
.type(SchemaElementType.TAG_VALUE)
.build();
dimensionValues.add(element);
}
return dimensionValues;
}
private static Set<SchemaElement> getDimensions(DataSetSchemaResp resp) {
Set<SchemaElement> dimensions = new HashSet<>();
for (DimSchemaResp dim : resp.getDimensions()) {

View File

@@ -32,7 +32,8 @@ public class SchemaDictUpdateListener implements ApplicationListener<DataEvent>
DictWord dictWord = new DictWord();
dictWord.setWord(dataItem.getName());
String sign = DictWordType.NATURE_SPILT;
String nature = sign + 1 + sign + dataItem.getId() + dataItem.getType().name().toLowerCase();
String suffixNature = DictWordType.getSuffixNature(dataItem.getType());
String nature = sign + dataItem.getModelId() + dataItem.getId() + suffixNature;
String natureWithFrequency = nature + " " + Constants.DEFAULT_FREQUENCY;
dictWord.setNature(nature);
dictWord.setNatureWithFrequency(natureWithFrequency);