diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/ChatQueryController.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/ChatQueryController.java
index e841f7f01..1e04eb04c 100644
--- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/ChatQueryController.java
+++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/rest/ChatQueryController.java
@@ -12,7 +12,7 @@ import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.request.DimensionValueReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryReq;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
diff --git a/common/pom.xml b/common/pom.xml
index 7f0003a1d..5c0c0a332 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -1,7 +1,7 @@
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
         <artifactId>supersonic</artifactId>
         <groupId>com.tencent.supersonic</groupId>
@@ -23,7 +23,6 @@
-
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
@@ -155,10 +154,6 @@
-        <dependency>
-            <groupId>dev.langchain4j</groupId>
-            <artifactId>langchain4j-spring-boot-starter</artifactId>
-        </dependency>
             <groupId>dev.langchain4j</groupId>
             <artifactId>langchain4j-open-ai</artifactId>
@@ -183,7 +178,6 @@
             <groupId>dev.langchain4j</groupId>
             <artifactId>langchain4j-embeddings-bge-small-zh</artifactId>
-
             <groupId>org.apache.logging.log4j</groupId>
             <artifactId>log4j-api</artifactId>
@@ -193,11 +187,45 @@
             <groupId>dev.langchain4j</groupId>
             <artifactId>langchain4j-embeddings</artifactId>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-spring-boot-starter</artifactId>
+            <version>${langchain4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-anthropic-spring-boot-starter</artifactId>
+            <version>${langchain4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-ollama-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-open-ai-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-azure-ai-search-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-azure-open-ai-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-embeddings-all-minilm-l6-v2-q</artifactId>
+        </dependency>
             <groupId>com.hankcs</groupId>
             <artifactId>hanlp</artifactId>
             <version>${hanlp.version}</version>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+        </dependency>
diff --git a/common/src/main/java/com/tencent/supersonic/common/util/S2ChatModelProvider.java b/common/src/main/java/com/tencent/supersonic/common/util/S2ChatModelProvider.java
index 461b6ea9c..7a9ce310b 100644
--- a/common/src/main/java/com/tencent/supersonic/common/util/S2ChatModelProvider.java
+++ b/common/src/main/java/com/tencent/supersonic/common/util/S2ChatModelProvider.java
@@ -4,8 +4,9 @@ import com.tencent.supersonic.common.config.LLMConfig;
import com.tencent.supersonic.common.pojo.enums.S2ModelProvider;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.localai.LocalAiChatModel;
-import dev.langchain4j.model.openai.FullOpenAiChatModel;
+import dev.langchain4j.model.openai.OpenAiChatModel;
import org.apache.commons.lang3.StringUtils;
+
import java.time.Duration;
public class S2ChatModelProvider {
@@ -17,7 +18,7 @@ public class S2ChatModelProvider {
return chatLanguageModel;
}
if (S2ModelProvider.OPEN_AI.name().equalsIgnoreCase(llmConfig.getProvider())) {
- return FullOpenAiChatModel
+ return OpenAiChatModel
.builder()
.baseUrl(llmConfig.getBaseUrl())
.modelName(llmConfig.getModelName())
diff --git a/common/src/main/java/dev/langchain4j/InProcess.java b/common/src/main/java/dev/langchain4j/InProcess.java
deleted file mode 100644
index 790eb39a1..000000000
--- a/common/src/main/java/dev/langchain4j/InProcess.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package dev.langchain4j;
-
-class InProcess {
-
- /***
- * the model local path
- */
- private String modelPath;
-
- /***
- * the model's vocabulary local path
- */
- private String vocabularyPath;
-
- public String getModelPath() {
- return modelPath;
- }
-
- public void setModelPath(String modelPath) {
- this.modelPath = modelPath;
- }
-
- public String getVocabularyPath() {
- return vocabularyPath;
- }
-
- public void setVocabularyPath(String vocabularyPath) {
- this.vocabularyPath = vocabularyPath;
- }
-}
\ No newline at end of file
diff --git a/common/src/main/java/dev/langchain4j/S2EmbeddingModel.java b/common/src/main/java/dev/langchain4j/S2EmbeddingModel.java
deleted file mode 100644
index e096168b2..000000000
--- a/common/src/main/java/dev/langchain4j/S2EmbeddingModel.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package dev.langchain4j;
-
-import com.tencent.supersonic.common.pojo.enums.S2ModelProvider;
-import org.springframework.boot.context.properties.NestedConfigurationProperty;
-
-class S2EmbeddingModel {
-
- @NestedConfigurationProperty
- private S2ModelProvider provider;
- @NestedConfigurationProperty
- private OpenAi openAi;
- @NestedConfigurationProperty
- private HuggingFace huggingFace;
- @NestedConfigurationProperty
- private LocalAi localAi;
-
- @NestedConfigurationProperty
- private InProcess inProcess;
-
- public S2ModelProvider getProvider() {
- return provider;
- }
-
- public void setProvider(S2ModelProvider provider) {
- this.provider = provider;
- }
-
- public OpenAi getOpenAi() {
- return openAi;
- }
-
- public void setOpenAi(OpenAi openAi) {
- this.openAi = openAi;
- }
-
- public HuggingFace getHuggingFace() {
- return huggingFace;
- }
-
- public void setHuggingFace(HuggingFace huggingFace) {
- this.huggingFace = huggingFace;
- }
-
- public LocalAi getLocalAi() {
- return localAi;
- }
-
- public void setLocalAi(LocalAi localAi) {
- this.localAi = localAi;
- }
-
- public InProcess getInProcess() {
- return inProcess;
- }
-
- public void setInProcess(InProcess inProcess) {
- this.inProcess = inProcess;
- }
-}
\ No newline at end of file
diff --git a/common/src/main/java/dev/langchain4j/S2LangChain4jAutoConfiguration.java b/common/src/main/java/dev/langchain4j/S2LangChain4jAutoConfiguration.java
deleted file mode 100644
index e1616d225..000000000
--- a/common/src/main/java/dev/langchain4j/S2LangChain4jAutoConfiguration.java
+++ /dev/null
@@ -1,290 +0,0 @@
-package dev.langchain4j;
-
-import static dev.langchain4j.exception.IllegalConfigurationException.illegalConfiguration;
-import static dev.langchain4j.internal.Utils.isNullOrBlank;
-
-import dev.langchain4j.model.chat.ChatLanguageModel;
-import dev.langchain4j.model.embedding.EmbeddingModel;
-import dev.langchain4j.model.embedding.S2OnnxEmbeddingModel;
-import dev.langchain4j.model.embedding.BgeSmallZhEmbeddingModel;
-import dev.langchain4j.model.huggingface.HuggingFaceChatModel;
-import dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel;
-import dev.langchain4j.model.huggingface.HuggingFaceLanguageModel;
-import dev.langchain4j.model.language.LanguageModel;
-import dev.langchain4j.model.localai.LocalAiChatModel;
-import dev.langchain4j.model.localai.LocalAiEmbeddingModel;
-import dev.langchain4j.model.localai.LocalAiLanguageModel;
-import dev.langchain4j.model.moderation.ModerationModel;
-import dev.langchain4j.model.openai.FullOpenAiChatModel;
-import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
-import dev.langchain4j.model.openai.OpenAiLanguageModel;
-import dev.langchain4j.model.openai.OpenAiModerationModel;
-import java.util.Objects;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
-import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Lazy;
-import org.springframework.context.annotation.Primary;
-
-@Configuration
-@EnableConfigurationProperties(S2LangChain4jProperties.class)
-public class S2LangChain4jAutoConfiguration {
-
- @Autowired
- private S2LangChain4jProperties properties;
-
- @Bean
- @Lazy
- @ConditionalOnMissingBean
- ChatLanguageModel chatLanguageModel(S2LangChain4jProperties properties) {
- if (properties.getChatModel() == null) {
- throw illegalConfiguration("\n\nPlease define 'langchain4j.chat-model' properties, for example:\n"
- + "langchain4j.chat-model.provider = openai\n"
- + "langchain4j.chat-model.openai.api-key = sk-...\n");
- }
-
- switch (properties.getChatModel().getProvider()) {
-
- case OPEN_AI:
- OpenAi openAi = properties.getChatModel().getOpenAi();
- if (openAi == null || isNullOrBlank(openAi.getApiKey())) {
- throw illegalConfiguration("\n\nPlease define 'langchain4j.chat-model.openai.api-key' property");
- }
- return FullOpenAiChatModel.builder()
- .baseUrl(openAi.getBaseUrl())
- .apiKey(openAi.getApiKey())
- .modelName(openAi.getModelName())
- .temperature(openAi.getTemperature())
- .topP(openAi.getTopP())
- .maxTokens(openAi.getMaxTokens())
- .presencePenalty(openAi.getPresencePenalty())
- .frequencyPenalty(openAi.getFrequencyPenalty())
- .timeout(openAi.getTimeout())
- .maxRetries(openAi.getMaxRetries())
- .logRequests(openAi.getLogRequests())
- .logResponses(openAi.getLogResponses())
- .build();
-
- case HUGGING_FACE:
- HuggingFace huggingFace = properties.getChatModel().getHuggingFace();
- if (huggingFace == null || isNullOrBlank(huggingFace.getAccessToken())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.chat-model.huggingface.access-token' property");
- }
- return HuggingFaceChatModel.builder()
- .accessToken(huggingFace.getAccessToken())
- .modelId(huggingFace.getModelId())
- .timeout(huggingFace.getTimeout())
- .temperature(huggingFace.getTemperature())
- .maxNewTokens(huggingFace.getMaxNewTokens())
- .returnFullText(huggingFace.getReturnFullText())
- .waitForModel(huggingFace.getWaitForModel())
- .build();
-
- case LOCAL_AI:
- LocalAi localAi = properties.getChatModel().getLocalAi();
- if (localAi == null || isNullOrBlank(localAi.getBaseUrl())) {
- throw illegalConfiguration("\n\nPlease define 'langchain4j.chat-model.localai.base-url' property");
- }
- if (isNullOrBlank(localAi.getModelName())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.chat-model.localai.model-name' property");
- }
- return LocalAiChatModel.builder()
- .baseUrl(localAi.getBaseUrl())
- .modelName(localAi.getModelName())
- .temperature(localAi.getTemperature())
- .topP(localAi.getTopP())
- .maxTokens(localAi.getMaxTokens())
- .timeout(localAi.getTimeout())
- .maxRetries(localAi.getMaxRetries())
- .logRequests(localAi.getLogRequests())
- .logResponses(localAi.getLogResponses())
- .build();
-
- default:
- throw illegalConfiguration("Unsupported chat model provider: %s",
- properties.getChatModel().getProvider());
- }
- }
-
- @Bean
- @Lazy
- @ConditionalOnMissingBean
- LanguageModel languageModel(S2LangChain4jProperties properties) {
- if (properties.getLanguageModel() == null) {
- throw illegalConfiguration("\n\nPlease define 'langchain4j.language-model' properties, for example:\n"
- + "langchain4j.language-model.provider = openai\n"
- + "langchain4j.language-model.openai.api-key = sk-...\n");
- }
-
- switch (properties.getLanguageModel().getProvider()) {
-
- case OPEN_AI:
- OpenAi openAi = properties.getLanguageModel().getOpenAi();
- if (openAi == null || isNullOrBlank(openAi.getApiKey())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.language-model.openai.api-key' property");
- }
- return OpenAiLanguageModel.builder()
- .apiKey(openAi.getApiKey())
- .baseUrl(openAi.getBaseUrl())
- .modelName(openAi.getModelName())
- .temperature(openAi.getTemperature())
- .timeout(openAi.getTimeout())
- .maxRetries(openAi.getMaxRetries())
- .logRequests(openAi.getLogRequests())
- .logResponses(openAi.getLogResponses())
- .build();
-
- case HUGGING_FACE:
- HuggingFace huggingFace = properties.getLanguageModel().getHuggingFace();
- if (huggingFace == null || isNullOrBlank(huggingFace.getAccessToken())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.language-model.huggingface.access-token' property");
- }
- return HuggingFaceLanguageModel.builder()
- .accessToken(huggingFace.getAccessToken())
- .modelId(huggingFace.getModelId())
- .timeout(huggingFace.getTimeout())
- .temperature(huggingFace.getTemperature())
- .maxNewTokens(huggingFace.getMaxNewTokens())
- .returnFullText(huggingFace.getReturnFullText())
- .waitForModel(huggingFace.getWaitForModel())
- .build();
-
- case LOCAL_AI:
- LocalAi localAi = properties.getLanguageModel().getLocalAi();
- if (localAi == null || isNullOrBlank(localAi.getBaseUrl())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.language-model.localai.base-url' property");
- }
- if (isNullOrBlank(localAi.getModelName())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.language-model.localai.model-name' property");
- }
- return LocalAiLanguageModel.builder()
- .baseUrl(localAi.getBaseUrl())
- .modelName(localAi.getModelName())
- .temperature(localAi.getTemperature())
- .topP(localAi.getTopP())
- .maxTokens(localAi.getMaxTokens())
- .timeout(localAi.getTimeout())
- .maxRetries(localAi.getMaxRetries())
- .logRequests(localAi.getLogRequests())
- .logResponses(localAi.getLogResponses())
- .build();
-
- default:
- throw illegalConfiguration("Unsupported language model provider: %s",
- properties.getLanguageModel().getProvider());
- }
- }
-
- @Bean
- @Lazy
- @ConditionalOnMissingBean
- @Primary
- EmbeddingModel embeddingModel(S2LangChain4jProperties properties) {
-
- if (properties.getEmbeddingModel() == null || properties.getEmbeddingModel().getProvider() == null) {
- throw illegalConfiguration("\n\nPlease define 'langchain4j.embedding-model' properties, for example:\n"
- + "langchain4j.embedding-model.provider = openai\n"
- + "langchain4j.embedding-model.openai.api-key = sk-...\n");
- }
-
- switch (properties.getEmbeddingModel().getProvider()) {
-
- case OPEN_AI:
- OpenAi openAi = properties.getEmbeddingModel().getOpenAi();
- if (openAi == null || isNullOrBlank(openAi.getApiKey())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.embedding-model.openai.api-key' property");
- }
- return OpenAiEmbeddingModel.builder()
- .apiKey(openAi.getApiKey())
- .baseUrl(openAi.getBaseUrl())
- .modelName(openAi.getModelName())
- .timeout(openAi.getTimeout())
- .maxRetries(openAi.getMaxRetries())
- .logRequests(openAi.getLogRequests())
- .logResponses(openAi.getLogResponses())
- .build();
-
- case HUGGING_FACE:
- HuggingFace huggingFace = properties.getEmbeddingModel().getHuggingFace();
- if (huggingFace == null || isNullOrBlank(huggingFace.getAccessToken())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.embedding-model.huggingface.access-token' property");
- }
- return HuggingFaceEmbeddingModel.builder()
- .accessToken(huggingFace.getAccessToken())
- .modelId(huggingFace.getModelId())
- .waitForModel(huggingFace.getWaitForModel())
- .timeout(huggingFace.getTimeout())
- .build();
-
- case LOCAL_AI:
- LocalAi localAi = properties.getEmbeddingModel().getLocalAi();
- if (localAi == null || isNullOrBlank(localAi.getBaseUrl())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.embedding-model.localai.base-url' property");
- }
- if (isNullOrBlank(localAi.getModelName())) {
- throw illegalConfiguration(
- "\n\nPlease define 'langchain4j.embedding-model.localai.model-name' property");
- }
- return LocalAiEmbeddingModel.builder()
- .baseUrl(localAi.getBaseUrl())
- .modelName(localAi.getModelName())
- .timeout(localAi.getTimeout())
- .maxRetries(localAi.getMaxRetries())
- .logRequests(localAi.getLogRequests())
- .logResponses(localAi.getLogResponses())
- .build();
- case IN_PROCESS:
- InProcess inProcess = properties.getEmbeddingModel().getInProcess();
- if (Objects.isNull(inProcess) || isNullOrBlank(inProcess.getModelPath())) {
- return new BgeSmallZhEmbeddingModel();
- }
- return new S2OnnxEmbeddingModel(inProcess.getModelPath(), inProcess.getVocabularyPath());
-
- default:
- throw illegalConfiguration("Unsupported embedding model provider: %s",
- properties.getEmbeddingModel().getProvider());
- }
- }
-
- @Bean
- @Lazy
- @ConditionalOnMissingBean
- ModerationModel moderationModel(S2LangChain4jProperties properties) {
- if (properties.getModerationModel() == null) {
- throw illegalConfiguration("\n\nPlease define 'langchain4j.moderation-model' properties, for example:\n"
- + "langchain4j.moderation-model.provider = openai\n"
- + "langchain4j.moderation-model.openai.api-key = sk-...\n");
- }
-
- if (properties.getModerationModel().getProvider() != ModelProvider.OPEN_AI) {
- throw illegalConfiguration("Unsupported moderation model provider: %s",
- properties.getModerationModel().getProvider());
- }
-
- OpenAi openAi = properties.getModerationModel().getOpenAi();
- if (openAi == null || isNullOrBlank(openAi.getApiKey())) {
- throw illegalConfiguration("\n\nPlease define 'langchain4j.moderation-model.openai.api-key' property");
- }
-
- return OpenAiModerationModel.builder()
- .apiKey(openAi.getApiKey())
- .modelName(openAi.getModelName())
- .timeout(openAi.getTimeout())
- .maxRetries(openAi.getMaxRetries())
- .logRequests(openAi.getLogRequests())
- .logResponses(openAi.getLogResponses())
- .build();
- }
-
-}
diff --git a/common/src/main/java/dev/langchain4j/S2LangChain4jProperties.java b/common/src/main/java/dev/langchain4j/S2LangChain4jProperties.java
deleted file mode 100644
index b6186899d..000000000
--- a/common/src/main/java/dev/langchain4j/S2LangChain4jProperties.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package dev.langchain4j;
-
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.boot.context.properties.NestedConfigurationProperty;
-
-@ConfigurationProperties(prefix = "s2.langchain4j")
-public class S2LangChain4jProperties {
-
- @NestedConfigurationProperty
- private ChatModel chatModel;
- @NestedConfigurationProperty
- private LanguageModel languageModel;
- @NestedConfigurationProperty
- private S2EmbeddingModel embeddingModel;
- @NestedConfigurationProperty
- private ModerationModel moderationModel;
-
- public ChatModel getChatModel() {
- return chatModel;
- }
-
- public void setChatModel(ChatModel chatModel) {
- this.chatModel = chatModel;
- }
-
- public LanguageModel getLanguageModel() {
- return languageModel;
- }
-
- public void setLanguageModel(LanguageModel languageModel) {
- this.languageModel = languageModel;
- }
-
- public S2EmbeddingModel getEmbeddingModel() {
- return embeddingModel;
- }
-
- public void setEmbeddingModel(S2EmbeddingModel s2EmbeddingModel) {
- this.embeddingModel = s2EmbeddingModel;
- }
-
- public ModerationModel getModerationModel() {
- return moderationModel;
- }
-
- public void setModerationModel(ModerationModel moderationModel) {
- this.moderationModel = moderationModel;
- }
-}
diff --git a/common/src/main/java/dev/langchain4j/model/ChatModel.java b/common/src/main/java/dev/langchain4j/model/ChatModel.java
deleted file mode 100644
index 8562df8c0..000000000
--- a/common/src/main/java/dev/langchain4j/model/ChatModel.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package dev.langchain4j.model;
-
-public enum ChatModel {
- ZHIPU("glm"),
- ALI("qwen");
-
- private final String modelName;
-
- private ChatModel(String modelName) {
- this.modelName = modelName;
- }
-
- public String toString() {
- return this.modelName;
- }
-
- public static ChatModel from(String stringValue) {
- ChatModel[] var1 = values();
- int var2 = var1.length;
-
- for (int var3 = 0; var3 < var2; ++var3) {
- ChatModel model = var1[var3];
- if (model.modelName.equals(stringValue)) {
- return model;
- }
- }
-
- throw new IllegalArgumentException("Unknown role: '" + stringValue + "'");
- }
-}
diff --git a/common/src/main/java/dev/langchain4j/model/embedding/EmbeddingModelConfig.java b/common/src/main/java/dev/langchain4j/model/embedding/EmbeddingModelConfig.java
new file mode 100644
index 000000000..7577f926a
--- /dev/null
+++ b/common/src/main/java/dev/langchain4j/model/embedding/EmbeddingModelConfig.java
@@ -0,0 +1,15 @@
+package dev.langchain4j.model.embedding;
+
+import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class EmbeddingModelConfig {
+
+ @Bean
+ @ConditionalOnMissingBean
+ public EmbeddingModel embeddingModel() {
+ return new BgeSmallZhEmbeddingModel();
+ }
+}
\ No newline at end of file
diff --git a/common/src/main/java/dev/langchain4j/model/embedding/S2OnnxEmbeddingModel.java b/common/src/main/java/dev/langchain4j/model/embedding/S2OnnxEmbeddingModel.java
index 73bdc61dc..4cba5b02e 100644
--- a/common/src/main/java/dev/langchain4j/model/embedding/S2OnnxEmbeddingModel.java
+++ b/common/src/main/java/dev/langchain4j/model/embedding/S2OnnxEmbeddingModel.java
@@ -1,12 +1,13 @@
package dev.langchain4j.model.embedding;
+import org.apache.commons.lang3.StringUtils;
+
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import org.apache.commons.lang3.StringUtils;
/**
* An embedding model that runs within your Java application's process.
@@ -51,7 +52,7 @@ public class S2OnnxEmbeddingModel extends AbstractInProcessEmbeddingModel {
try {
return new OnnxBertBiEncoder(
Files.newInputStream(pathToModel),
- vocabularyFile,
+ vocabularyFile.openStream(),
PoolingMode.MEAN
);
} catch (IOException e) {
diff --git a/common/src/main/java/dev/langchain4j/model/openai/FullOpenAiChatModel.java b/common/src/main/java/dev/langchain4j/model/openai/FullOpenAiChatModel.java
deleted file mode 100644
index fef875c68..000000000
--- a/common/src/main/java/dev/langchain4j/model/openai/FullOpenAiChatModel.java
+++ /dev/null
@@ -1,227 +0,0 @@
-package dev.langchain4j.model.openai;
-
-import dev.ai4j.openai4j.OpenAiClient;
-import dev.ai4j.openai4j.chat.ChatCompletionChoice;
-import dev.ai4j.openai4j.chat.ChatCompletionRequest;
-import dev.ai4j.openai4j.chat.ChatCompletionResponse;
-import dev.ai4j.openai4j.chat.ChatCompletionRequest.Builder;
-import dev.langchain4j.agent.tool.ToolSpecification;
-import dev.langchain4j.data.message.AiMessage;
-import dev.langchain4j.data.message.ChatMessage;
-import dev.langchain4j.internal.RetryUtils;
-import dev.langchain4j.internal.Utils;
-import dev.langchain4j.model.ChatModel;
-import dev.langchain4j.model.Tokenizer;
-import dev.langchain4j.model.chat.ChatLanguageModel;
-import dev.langchain4j.model.chat.TokenCountEstimator;
-import dev.langchain4j.model.output.Response;
-
-import java.net.Proxy;
-import java.time.Duration;
-import java.util.Collections;
-import java.util.List;
-
-public class FullOpenAiChatModel implements ChatLanguageModel, TokenCountEstimator {
-
- private final OpenAiClient client;
- private final String modelName;
- private final Double temperature;
- private final Double topP;
- private final List<String> stop;
- private final Integer maxTokens;
- private final Double presencePenalty;
- private final Double frequencyPenalty;
- private final Integer maxRetries;
- private final Tokenizer tokenizer;
-
- public FullOpenAiChatModel(String baseUrl, String apiKey, String modelName, Double temperature,
- Double topP, List<String> stop, Integer maxTokens, Double presencePenalty,
- Double frequencyPenalty, Duration timeout, Integer maxRetries, Proxy proxy,
- Boolean logRequests, Boolean logResponses, Tokenizer tokenizer) {
- baseUrl = Utils.getOrDefault(baseUrl, "https://api.openai.com/v1");
- if ("demo".equals(apiKey)) {
- baseUrl = "http://langchain4j.dev/demo/openai/v1";
- }
-
- timeout = Utils.getOrDefault(timeout, Duration.ofSeconds(60L));
- this.client = OpenAiClient.builder().openAiApiKey(apiKey)
- .baseUrl(baseUrl).callTimeout(timeout).connectTimeout(timeout)
- .readTimeout(timeout).writeTimeout(timeout).proxy(proxy)
- .logRequests(logRequests).logResponses(logResponses).build();
- this.modelName = Utils.getOrDefault(modelName, "gpt-3.5-turbo");
- this.temperature = Utils.getOrDefault(temperature, 0.7D);
- this.topP = topP;
- this.stop = stop;
- this.maxTokens = maxTokens;
- this.presencePenalty = presencePenalty;
- this.frequencyPenalty = frequencyPenalty;
- this.maxRetries = Utils.getOrDefault(maxRetries, 3);
- this.tokenizer = Utils.getOrDefault(tokenizer, new OpenAiTokenizer(this.modelName));
- }
-
- public Response<AiMessage> generate(List<ChatMessage> messages) {
- return this.generate(messages, null, null);
- }
-
- public Response<AiMessage> generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications) {
- return this.generate(messages, toolSpecifications, null);
- }
-
- public Response<AiMessage> generate(List<ChatMessage> messages, ToolSpecification toolSpecification) {
- return this.generate(messages, Collections.singletonList(toolSpecification), toolSpecification);
- }
-
- private Response<AiMessage> generate(List<ChatMessage> messages,
- List<ToolSpecification> toolSpecifications,
- ToolSpecification toolThatMustBeExecuted) {
- Builder requestBuilder = null;
- if (modelName.contains(ChatModel.ZHIPU.toString()) || modelName.contains(ChatModel.ALI.toString())) {
- requestBuilder = ChatCompletionRequest.builder()
- .model(this.modelName)
- .messages(ImproveInternalOpenAiHelper.toOpenAiMessages(messages, this.modelName));
- } else {
- requestBuilder = ChatCompletionRequest.builder()
- .model(this.modelName)
- .messages(ImproveInternalOpenAiHelper.toOpenAiMessages(messages, this.modelName))
- .temperature(this.temperature).topP(this.topP).stop(this.stop).maxTokens(this.maxTokens)
- .presencePenalty(this.presencePenalty).frequencyPenalty(this.frequencyPenalty);
- }
- if (toolSpecifications != null && !toolSpecifications.isEmpty()) {
- requestBuilder.functions(InternalOpenAiHelper.toFunctions(toolSpecifications));
- }
-
- if (toolThatMustBeExecuted != null) {
- requestBuilder.functionCall(toolThatMustBeExecuted.name());
- }
-
- ChatCompletionRequest request = requestBuilder.build();
- ChatCompletionResponse response = (ChatCompletionResponse) RetryUtils.withRetry(() -> {
- return (ChatCompletionResponse) this.client.chatCompletion(request).execute();
- }, this.maxRetries);
- return Response.from(InternalOpenAiHelper.aiMessageFrom(response),
- InternalOpenAiHelper.tokenUsageFrom(response.usage()),
- InternalOpenAiHelper.finishReasonFrom(
- ((ChatCompletionChoice) response.choices().get(0)).finishReason()));
- }
-
- public int estimateTokenCount(List<ChatMessage> messages) {
- return this.tokenizer.estimateTokenCountInMessages(messages);
- }
-
- public static FullOpenAiChatModel.FullOpenAiChatModelBuilder builder() {
- return new FullOpenAiChatModel.FullOpenAiChatModelBuilder();
- }
-
- public static class FullOpenAiChatModelBuilder {
-
- private String baseUrl;
- private String apiKey;
- private String modelName;
- private Double temperature;
- private Double topP;
- private List<String> stop;
- private Integer maxTokens;
- private Double presencePenalty;
- private Double frequencyPenalty;
- private Duration timeout;
- private Integer maxRetries;
- private Proxy proxy;
- private Boolean logRequests;
- private Boolean logResponses;
- private Tokenizer tokenizer;
-
- FullOpenAiChatModelBuilder() {
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder baseUrl(String baseUrl) {
- this.baseUrl = baseUrl;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder apiKey(String apiKey) {
- this.apiKey = apiKey;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder modelName(String modelName) {
- this.modelName = modelName;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder temperature(Double temperature) {
- this.temperature = temperature;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder topP(Double topP) {
- this.topP = topP;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder stop(List<String> stop) {
- this.stop = stop;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder maxTokens(Integer maxTokens) {
- this.maxTokens = maxTokens;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder presencePenalty(Double presencePenalty) {
- this.presencePenalty = presencePenalty;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder frequencyPenalty(Double frequencyPenalty) {
- this.frequencyPenalty = frequencyPenalty;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder timeout(Duration timeout) {
- this.timeout = timeout;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder maxRetries(Integer maxRetries) {
- this.maxRetries = maxRetries;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder proxy(Proxy proxy) {
- this.proxy = proxy;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder logRequests(Boolean logRequests) {
- this.logRequests = logRequests;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder logResponses(Boolean logResponses) {
- this.logResponses = logResponses;
- return this;
- }
-
- public FullOpenAiChatModel.FullOpenAiChatModelBuilder tokenizer(Tokenizer tokenizer) {
- this.tokenizer = tokenizer;
- return this;
- }
-
- public FullOpenAiChatModel build() {
- return new FullOpenAiChatModel(this.baseUrl, this.apiKey, this.modelName, this.temperature,
- this.topP, this.stop, this.maxTokens, this.presencePenalty, this.frequencyPenalty,
- this.timeout, this.maxRetries, this.proxy, this.logRequests, this.logResponses, this.tokenizer);
- }
-
- public String toString() {
- return "FullOpenAiChatModel.FullOpenAiChatModelBuilder(baseUrl=" + this.baseUrl
- + ", apiKey=" + this.apiKey + ", modelName=" + this.modelName + ", temperature="
- + this.temperature + ", topP=" + this.topP + ", stop=" + this.stop + ", maxTokens="
- + this.maxTokens + ", presencePenalty=" + this.presencePenalty + ", frequencyPenalty="
- + this.frequencyPenalty + ", timeout=" + this.timeout + ", maxRetries=" + this.maxRetries
- + ", proxy=" + this.proxy + ", logRequests=" + this.logRequests + ", logResponses="
- + this.logResponses + ", tokenizer=" + this.tokenizer + ")";
- }
- }
-}
diff --git a/common/src/main/java/dev/langchain4j/model/openai/ImproveInternalOpenAiHelper.java b/common/src/main/java/dev/langchain4j/model/openai/ImproveInternalOpenAiHelper.java
deleted file mode 100644
index ed2b1ac6f..000000000
--- a/common/src/main/java/dev/langchain4j/model/openai/ImproveInternalOpenAiHelper.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package dev.langchain4j.model.openai;
-
-import dev.ai4j.openai4j.chat.FunctionCall;
-import dev.ai4j.openai4j.chat.Message;
-import dev.ai4j.openai4j.chat.Role;
-import dev.langchain4j.data.message.AiMessage;
-import dev.langchain4j.data.message.ChatMessage;
-import dev.langchain4j.data.message.SystemMessage;
-import dev.langchain4j.data.message.ToolExecutionResultMessage;
-import dev.langchain4j.data.message.UserMessage;
-import dev.langchain4j.model.ChatModel;
-import java.util.List;
-import java.util.stream.Collectors;
-
-public class ImproveInternalOpenAiHelper {
-
- public ImproveInternalOpenAiHelper() {
- }
-
- public static List<Message> toOpenAiMessages(List<ChatMessage> messages, String modelName) {
- List<Message> messageList = messages.stream()
- .map(message -> toOpenAiMessage(message, modelName)).collect(Collectors.toList());
- return messageList;
- }
-
- public static Message toOpenAiMessage(ChatMessage message, String modelName) {
- return Message.builder().role(roleFrom(message, modelName))
- .name(nameFrom(message)).content(message.text())
- .functionCall(functionCallFrom(message)).build();
- }
-
- private static String nameFrom(ChatMessage message) {
- if (message instanceof UserMessage) {
- return ((UserMessage) message).name();
- } else {
- return message instanceof ToolExecutionResultMessage
- ? ((ToolExecutionResultMessage) message).toolName() : null;
- }
- }
-
- private static FunctionCall functionCallFrom(ChatMessage message) {
- if (message instanceof AiMessage) {
- AiMessage aiMessage = (AiMessage) message;
- if (aiMessage.toolExecutionRequest() != null) {
- return FunctionCall.builder().name(aiMessage.toolExecutionRequest().name())
- .arguments(aiMessage.toolExecutionRequest().arguments()).build();
- }
- }
-
- return null;
- }
-
- public static Role roleFrom(ChatMessage message, String modelName) {
- if (modelName.contains(ChatModel.ZHIPU.toString()) || modelName.contains(ChatModel.ALI.toString())) {
- return Role.USER;
- }
- if (message instanceof AiMessage) {
- return Role.ASSISTANT;
- } else if (message instanceof ToolExecutionResultMessage) {
- return Role.FUNCTION;
- } else {
- return message instanceof SystemMessage ? Role.SYSTEM : Role.USER;
- }
- }
-
-}
diff --git a/common/src/main/java/dev/langchain4j/store/embedding/InMemoryS2EmbeddingStore.java b/common/src/main/java/dev/langchain4j/store/embedding/InMemoryS2EmbeddingStore.java
index fc3e13dd9..009536eab 100644
--- a/common/src/main/java/dev/langchain4j/store/embedding/InMemoryS2EmbeddingStore.java
+++ b/common/src/main/java/dev/langchain4j/store/embedding/InMemoryS2EmbeddingStore.java
@@ -86,7 +86,7 @@ public class InMemoryS2EmbeddingStore implements S2EmbeddingStore {
@Override
public void addQuery(String collectionName, List<EmbeddingQuery> queries) {
InMemoryEmbeddingStore embeddingStore = getEmbeddingStore(collectionName);
- EmbeddingModel embeddingModel = ContextUtils.getBean(EmbeddingModel.class);
+ EmbeddingModel embeddingModel = getEmbeddingModel();
for (EmbeddingQuery query : queries) {
String question = query.getQuery();
Embedding embedding = embeddingModel.embed(question).content();
@@ -94,6 +94,10 @@ public class InMemoryS2EmbeddingStore implements S2EmbeddingStore {
}
}
+ private static EmbeddingModel getEmbeddingModel() {
+ return ContextUtils.getBean(EmbeddingModel.class);
+ }
+
private InMemoryEmbeddingStore getEmbeddingStore(String collectionName) {
InMemoryEmbeddingStore embeddingStore = collectionNameToStore.get(collectionName);
if (Objects.isNull(embeddingStore)) {
@@ -113,7 +117,7 @@ public class InMemoryS2EmbeddingStore implements S2EmbeddingStore {
@Override
public List<RetrieveQueryResult> retrieveQuery(String collectionName, RetrieveQuery retrieveQuery, int num) {
InMemoryEmbeddingStore embeddingStore = getEmbeddingStore(collectionName);
- EmbeddingModel embeddingModel = ContextUtils.getBean(EmbeddingModel.class);
+ EmbeddingModel embeddingModel = getEmbeddingModel();
List<RetrieveQueryResult> results = new ArrayList<>();
diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ModelSchemaResp.java b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ModelSchemaResp.java
index ac515e6e9..8a48e2f68 100644
--- a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ModelSchemaResp.java
+++ b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ModelSchemaResp.java
@@ -2,13 +2,14 @@ package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ModelRela;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
@Data
@AllArgsConstructor
diff --git a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ParseResp.java b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ParseResp.java
index 031395c6a..02d3ff62b 100644
--- a/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ParseResp.java
+++ b/headless/api/src/main/java/com/tencent/supersonic/headless/api/pojo/response/ParseResp.java
@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import lombok.Data;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import java.util.Comparator;
import java.util.List;
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/mapper/BaseMatchStrategy.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/mapper/BaseMatchStrategy.java
index c28f788a1..a289ab0f9 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/mapper/BaseMatchStrategy.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/mapper/BaseMatchStrategy.java
@@ -6,7 +6,7 @@ import com.tencent.supersonic.headless.api.pojo.response.S2Term;
import com.tencent.supersonic.headless.chat.QueryContext;
import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/QueryTypeParser.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/QueryTypeParser.java
index 17dcf9298..afdab2d54 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/QueryTypeParser.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/QueryTypeParser.java
@@ -14,7 +14,7 @@ import com.tencent.supersonic.headless.chat.query.rule.RuleSemanticQuery;
import com.tencent.supersonic.headless.chat.ChatContext;
import com.tencent.supersonic.headless.chat.QueryContext;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/ExemplarManager.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/ExemplarManager.java
index d54428485..fe5cac84b 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/ExemplarManager.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/ExemplarManager.java
@@ -11,7 +11,7 @@ import dev.langchain4j.store.embedding.RetrieveQuery;
import dev.langchain4j.store.embedding.RetrieveQueryResult;
import dev.langchain4j.store.embedding.S2EmbeddingStore;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.stereotype.Component;
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMResponseService.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMResponseService.java
index aa1d4f17f..4ed7ec1b6 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMResponseService.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMResponseService.java
@@ -10,7 +10,7 @@ import com.tencent.supersonic.headless.chat.query.llm.s2sql.LLMSqlQuery;
import com.tencent.supersonic.headless.chat.query.llm.s2sql.LLMSqlResp;
import com.tencent.supersonic.headless.chat.QueryContext;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.MapUtils;
+import org.apache.commons.collections.MapUtils;
import org.springframework.stereotype.Service;
import java.util.HashMap;
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMSqlParser.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMSqlParser.java
index 330d3aab8..302cc7623 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMSqlParser.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/LLMSqlParser.java
@@ -10,7 +10,7 @@ import com.tencent.supersonic.headless.chat.query.llm.s2sql.LLMSqlResp;
import com.tencent.supersonic.headless.chat.parser.SemanticParser;
import com.tencent.supersonic.headless.chat.ChatContext;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.MapUtils;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/PythonLLMProxy.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/PythonLLMProxy.java
index 6440ddd66..a57f7fc16 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/PythonLLMProxy.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/PythonLLMProxy.java
@@ -5,7 +5,7 @@ import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.chat.query.llm.s2sql.LLMResp;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.MapUtils;
+import org.apache.commons.collections.MapUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpEntity;
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/BaseSemanticQuery.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/BaseSemanticQuery.java
index 1474ca56d..0a14d4bef 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/BaseSemanticQuery.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/query/BaseSemanticQuery.java
@@ -14,7 +14,7 @@ import com.tencent.supersonic.headless.chat.parser.ParserConfig;
import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import java.io.Serializable;
import java.util.List;
diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/parser/calcite/sql/node/SemanticNode.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/parser/calcite/sql/node/SemanticNode.java
index 1258bb36a..e2016e6fb 100644
--- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/parser/calcite/sql/node/SemanticNode.java
+++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/parser/calcite/sql/node/SemanticNode.java
@@ -8,17 +8,6 @@ import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.utils.SqlDialectFactory;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.function.UnaryOperator;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepPlanner;
@@ -48,9 +37,21 @@ import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.sql.validate.SqlValidatorWithHints;
import org.apache.calcite.sql2rel.SqlToRelConverter;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.UnaryOperator;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
/**
* model item node
*/
diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/ClassRepositoryImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/ClassRepositoryImpl.java
index 32464ae53..70deb580c 100644
--- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/ClassRepositoryImpl.java
+++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/persistence/repository/impl/ClassRepositoryImpl.java
@@ -5,7 +5,7 @@ import com.tencent.supersonic.headless.server.persistence.dataobject.ClassDO;
import com.tencent.supersonic.headless.server.persistence.mapper.ClassMapper;
import com.tencent.supersonic.headless.server.persistence.repository.ClassRepository;
import com.tencent.supersonic.headless.server.pojo.ClassFilter;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.apache.logging.log4j.util.Strings;
import org.springframework.stereotype.Repository;
diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/CatalogImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/CatalogImpl.java
index 56ec65f51..21a6d4e60 100644
--- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/CatalogImpl.java
+++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/CatalogImpl.java
@@ -27,7 +27,7 @@ import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.service.SchemaService;
import com.tencent.supersonic.headless.server.service.DataSetService;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/MetaDiscoveryServiceImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/MetaDiscoveryServiceImpl.java
index ba0338d37..7069a0641 100644
--- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/MetaDiscoveryServiceImpl.java
+++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/MetaDiscoveryServiceImpl.java
@@ -19,7 +19,7 @@ import com.tencent.supersonic.headless.server.pojo.MetaFilter;
import com.tencent.supersonic.headless.server.service.ChatQueryService;
import com.tencent.supersonic.headless.server.service.DataSetService;
import com.tencent.supersonic.headless.server.service.MetaDiscoveryService;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/WorkflowServiceImpl.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/WorkflowServiceImpl.java
index 4da96b3fa..3ee22ac7a 100644
--- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/WorkflowServiceImpl.java
+++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/service/impl/WorkflowServiceImpl.java
@@ -14,8 +14,8 @@ import com.tencent.supersonic.headless.server.processor.ResultProcessor;
import com.tencent.supersonic.headless.server.service.WorkflowService;
import com.tencent.supersonic.headless.server.utils.ComponentFactory;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.collections4.MapUtils;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
import org.springframework.stereotype.Service;
import java.util.List;
diff --git a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/ClassConverter.java b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/ClassConverter.java
index 281006863..bcaff7a15 100644
--- a/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/ClassConverter.java
+++ b/headless/server/src/main/java/com/tencent/supersonic/headless/server/utils/ClassConverter.java
@@ -10,7 +10,7 @@ import com.tencent.supersonic.headless.server.persistence.repository.ClassReposi
import com.tencent.supersonic.headless.server.service.DomainService;
import com.tencent.supersonic.headless.server.service.TagObjectService;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Component;
diff --git a/launchers/standalone/src/main/java/com/tencent/supersonic/StandaloneLauncher.java b/launchers/standalone/src/main/java/com/tencent/supersonic/StandaloneLauncher.java
index 14436c57f..60a8278f4 100644
--- a/launchers/standalone/src/main/java/com/tencent/supersonic/StandaloneLauncher.java
+++ b/launchers/standalone/src/main/java/com/tencent/supersonic/StandaloneLauncher.java
@@ -1,20 +1,17 @@
package com.tencent.supersonic;
-import dev.langchain4j.S2LangChain4jAutoConfiguration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
-import org.springframework.context.annotation.Import;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
-@SpringBootApplication(scanBasePackages = {"com.tencent.supersonic"},
+@SpringBootApplication(scanBasePackages = {"com.tencent.supersonic", "dev.langchain4j.model"},
exclude = {MongoAutoConfiguration.class, MongoDataAutoConfiguration.class})
@EnableScheduling
@EnableAsync
-@Import(S2LangChain4jAutoConfiguration.class)
@EnableSwagger2
public class StandaloneLauncher {
diff --git a/launchers/standalone/src/main/resources/META-INF/spring.factories b/launchers/standalone/src/main/resources/META-INF/spring.factories
index 70e918881..8d5f155a4 100644
--- a/launchers/standalone/src/main/resources/META-INF/spring.factories
+++ b/launchers/standalone/src/main/resources/META-INF/spring.factories
@@ -88,3 +88,11 @@ com.tencent.supersonic.auth.api.authentication.adaptor.UserAdaptor=\
dev.langchain4j.store.embedding.S2EmbeddingStore=\
dev.langchain4j.store.embedding.InMemoryS2EmbeddingStore
+
+org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
+ dev.langchain4j.spring.LangChain4jAutoConfig,\
+ dev.langchain4j.openai.spring.AutoConfig,\
+ dev.langchain4j.ollama.spring.AutoConfig,\
+ dev.langchain4j.azure.openai.spring.AutoConfig,\
+ dev.langchain4j.azure.aisearch.spring.AutoConfig,\
+ dev.langchain4j.anthropic.spring.AutoConfig
\ No newline at end of file
diff --git a/launchers/standalone/src/main/resources/application-local.yaml b/launchers/standalone/src/main/resources/application-local.yaml
index 639314d30..78a6d0094 100644
--- a/launchers/standalone/src/main/resources/application-local.yaml
+++ b/launchers/standalone/src/main/resources/application-local.yaml
@@ -17,7 +17,14 @@ spring:
     url: jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false
     username: root
     password: semantic
-
+  autoconfigure:
+    exclude:
+      - spring.dev.langchain4j.spring.LangChain4jAutoConfig
+      - spring.dev.langchain4j.openai.spring.AutoConfig
+      - spring.dev.langchain4j.ollama.spring.AutoConfig
+      - spring.dev.langchain4j.azure.openai.spring.AutoConfig
+      - spring.dev.langchain4j.azure.aisearch.spring.AutoConfig
+      - spring.dev.langchain4j.anthropic.spring.AutoConfig
 mybatis:
   mapper-locations: classpath:mappers/custom/*.xml,classpath*:/mappers/*.xml
@@ -67,42 +74,6 @@ s2:
names: S2VisitsDemo,S2ArtistDemo
enableLLM: true
-  langchain4j:
-    #1.chat-model
-    chat-model:
-      provider: open_ai
-      openai:
-        # Replace with your LLM configs
-        # Note: The default API key `demo` is provided by langchain4j community
-        # which limits 1000 tokens per request.
-        base-url: ${OPENAI_API_BASE:https://api.openai.com/v1}
-        api-key: ${OPENAI_API_KEY:demo}
-        model-name: ${OPENAI_MODEL_NAME:gpt-3.5-turbo}
-        temperature: ${OPENAI_TEMPERATURE:0.0}
-        timeout: ${OPENAI_TIMEOUT:PT60S}
-    #2.embedding-model
-    #2.1 in_memory(default)
-    embedding-model:
-      provider: in_process
-      # inProcess:
-      #   modelPath: /data/model.onnx
-      #   vocabularyPath: /data/onnx_vocab.txt
-      #   shibing624/text2vec-base-chinese
-    #2.2 open_ai
-    # embedding-model:
-    #   provider: open_ai
-    #   openai:
-    #     api-key: api_key
-    #     modelName: all-minilm-l6-v2.onnx
-
-    #2.2 hugging_face
-    # embedding-model:
-    #   provider: hugging_face
-    #   hugging-face:
-    #     access-token: hg_access_token
-    #     model-id: sentence-transformers/all-MiniLM-L6-v2
-    #     timeout: 1h
-
# swagger配置
swagger:
title: 'SuperSonic平台接口文档'
@@ -115,3 +86,20 @@ swagger:
email:
url: ''
version: 3.0
+
+
+langchain4j:
+  open-ai:
+    chat-model:
+      # Replace with your LLM configs
+      # Note: The default API key `demo` is provided by langchain4j community
+      # which limits 1000 tokens per request.
+      base-url: ${OPENAI_API_BASE:https://api.openai.com/v1}
+      api-key: ${OPENAI_API_KEY:demo}
+      model-name: ${OPENAI_MODEL_NAME:gpt-3.5-turbo}
+      temperature: ${OPENAI_TEMPERATURE:0.0}
+      timeout: ${OPENAI_TIMEOUT:PT60S}
+    # java.lang.RuntimeException: dev.ai4j.openai4j.OpenAiHttpException: Too many requests
+    embedding-model:
+      base-url: ${OPENAI_API_BASE:https://api.openai.com/v1}
+      api-key: ${OPENAI_API_KEY:demo}
\ No newline at end of file
diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java
index a47755a46..b84779db8 100644
--- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java
+++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MetricTest.java
@@ -40,7 +40,6 @@ public class MetricTest extends BaseTest {
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
- expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问用户数"));
expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name",
FilterOperatorEnum.EQUALS, "alice", "用户", 2L));
@@ -74,7 +73,6 @@ public class MetricTest extends BaseTest {
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
- expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问用户数"));
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.RECENT, unit, period, startDay, endDay));
expectedParseInfo.setQueryType(QueryType.METRIC);
@@ -103,7 +101,6 @@ public class MetricTest extends BaseTest {
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
- expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问用户数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("部门"));
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.RECENT, unit, period, startDay, endDay));
@@ -124,7 +121,6 @@ public class MetricTest extends BaseTest {
expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
- expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问用户数"));
List<String> list = new ArrayList<>();
list.add("alice");
list.add("lucy");
@@ -171,7 +167,6 @@ public class MetricTest extends BaseTest {
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
- expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问用户数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("部门"));
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.RECENT, unit, period, startDay, endDay));
@@ -197,7 +192,6 @@ public class MetricTest extends BaseTest {
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
- expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问用户数"));
expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name",
FilterOperatorEnum.EQUALS, "alice", "用户", 2L));
diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MultiTurnsTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MultiTurnsTest.java
index 508108656..55737e5df 100644
--- a/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MultiTurnsTest.java
+++ b/launchers/standalone/src/test/java/com/tencent/supersonic/chat/MultiTurnsTest.java
@@ -29,7 +29,6 @@ public class MultiTurnsTest extends BaseTest {
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
- expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问用户数"));
expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name",
FilterOperatorEnum.EQUALS, "alice", "用户", 2L));
diff --git a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/BaseTest.java b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/BaseTest.java
index 86ded9925..d15865405 100644
--- a/launchers/standalone/src/test/java/com/tencent/supersonic/headless/BaseTest.java
+++ b/launchers/standalone/src/test/java/com/tencent/supersonic/headless/BaseTest.java
@@ -14,7 +14,7 @@ import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.server.service.QueryService;
import com.tencent.supersonic.util.DataUtils;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
diff --git a/launchers/standalone/src/test/resources/META-INF/spring.factories b/launchers/standalone/src/test/resources/META-INF/spring.factories
index 5794db3e0..70e918881 100644
--- a/launchers/standalone/src/test/resources/META-INF/spring.factories
+++ b/launchers/standalone/src/test/resources/META-INF/spring.factories
@@ -1,3 +1,5 @@
+### headless-chat SPIs
+
com.tencent.supersonic.headless.chat.mapper.SchemaMapper=\
com.tencent.supersonic.headless.chat.mapper.EmbeddingMapper, \
com.tencent.supersonic.headless.chat.mapper.KeywordMapper, \
@@ -9,20 +11,20 @@ com.tencent.supersonic.headless.chat.parser.SemanticParser=\
com.tencent.supersonic.headless.chat.parser.llm.LLMSqlParser, \
com.tencent.supersonic.headless.chat.parser.QueryTypeParser
-com.tencent.supersonic.chat.server.parser.ChatParser=\
- com.tencent.supersonic.chat.server.parser.NL2PluginParser, \
- com.tencent.supersonic.chat.server.parser.MultiTurnParser, \
- com.tencent.supersonic.chat.server.parser.NL2SQLParser
-
-com.tencent.supersonic.chat.server.executor.ChatExecutor=\
- com.tencent.supersonic.chat.server.executor.PluginExecutor, \
- com.tencent.supersonic.chat.server.executor.SqlExecutor
-
com.tencent.supersonic.headless.chat.corrector.SemanticCorrector=\
com.tencent.supersonic.headless.chat.corrector.SchemaCorrector, \
com.tencent.supersonic.headless.chat.corrector.TimeCorrector, \
com.tencent.supersonic.headless.chat.corrector.GrammarCorrector
+com.tencent.supersonic.headless.chat.knowledge.file.FileHandler=\
+ com.tencent.supersonic.headless.chat.knowledge.file.FileHandlerImpl
+
+com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
+ com.tencent.supersonic.headless.chat.parser.llm.HeuristicDataSetResolver
+
+
+### headless-core SPIs
+
com.tencent.supersonic.headless.core.parser.converter.HeadlessConverter=\
com.tencent.supersonic.headless.core.parser.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.parser.converter.SqlVariableParseConverter,\
@@ -41,18 +43,24 @@ com.tencent.supersonic.headless.core.parser.SqlParser=\
com.tencent.supersonic.headless.core.cache.QueryCache=\
com.tencent.supersonic.headless.core.cache.DefaultQueryCache
+
+### headless-server SPIs
+
com.tencent.supersonic.headless.server.processor.ResultProcessor=\
com.tencent.supersonic.headless.server.processor.ParseInfoProcessor, \
com.tencent.supersonic.headless.server.processor.SqlInfoProcessor
-com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
- com.tencent.supersonic.headless.chat.parser.llm.HeuristicDataSetResolver
-com.tencent.supersonic.auth.authentication.interceptor.AuthenticationInterceptor=\
- com.tencent.supersonic.auth.authentication.interceptor.DefaultAuthenticationInterceptor
+### chat-server SPIs
-com.tencent.supersonic.auth.api.authentication.adaptor.UserAdaptor=\
- com.tencent.supersonic.auth.authentication.adaptor.DefaultUserAdaptor
+com.tencent.supersonic.chat.server.parser.ChatParser=\
+ com.tencent.supersonic.chat.server.parser.NL2PluginParser, \
+ com.tencent.supersonic.chat.server.parser.MultiTurnParser,\
+ com.tencent.supersonic.chat.server.parser.NL2SQLParser
+
+com.tencent.supersonic.chat.server.executor.ChatExecutor=\
+ com.tencent.supersonic.chat.server.executor.PluginExecutor, \
+ com.tencent.supersonic.chat.server.executor.SqlExecutor
com.tencent.supersonic.chat.server.plugin.recognize.PluginRecognizer=\
com.tencent.supersonic.chat.server.plugin.recognize.embedding.EmbeddingRecallRecognizer
@@ -67,5 +75,16 @@ com.tencent.supersonic.chat.server.processor.execute.ExecuteResultProcessor=\
com.tencent.supersonic.chat.server.processor.execute.DimensionRecommendProcessor,\
com.tencent.supersonic.chat.server.processor.execute.MetricRatioProcessor
+### auth-authentication SPIs
+
+com.tencent.supersonic.auth.authentication.interceptor.AuthenticationInterceptor=\
+ com.tencent.supersonic.auth.authentication.interceptor.DefaultAuthenticationInterceptor
+
+com.tencent.supersonic.auth.api.authentication.adaptor.UserAdaptor=\
+ com.tencent.supersonic.auth.authentication.adaptor.DefaultUserAdaptor
+
+
+### common SPIs
+
dev.langchain4j.store.embedding.S2EmbeddingStore=\
dev.langchain4j.store.embedding.InMemoryS2EmbeddingStore
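
The regrouped entries above follow the standard META-INF/spring.factories convention: each SPI interface name maps to a comma-separated list of implementation classes, now organized by module (headless-chat, headless-core, headless-server, chat-server, auth, common). As a hedged illustration of how such a key can be resolved, Spring's generic factories loader can list the registered implementations; SuperSonic's actual SPI loading code is not shown in this patch and may use its own utility:

    import org.springframework.core.io.support.SpringFactoriesLoader;

    import java.util.List;

    // Illustration only: prints the implementation class names registered under
    // the SchemaMapper key in META-INF/spring.factories.
    public class SpiInspector {
        public static void main(String[] args) {
            List<String> names = SpringFactoriesLoader.loadFactoryNames(
                    com.tencent.supersonic.headless.chat.mapper.SchemaMapper.class,
                    SpiInspector.class.getClassLoader());
            names.forEach(System.out::println);
        }
    }
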
diff --git a/launchers/standalone/src/test/resources/application-local.yaml b/launchers/standalone/src/test/resources/application-local.yaml
index 772a5cf3d..549167b8b 100644
--- a/launchers/standalone/src/test/resources/application-local.yaml
+++ b/launchers/standalone/src/test/resources/application-local.yaml
@@ -17,7 +17,14 @@ spring:
url: jdbc:h2:mem:semantic;DATABASE_TO_UPPER=false
username: root
password: semantic
-
+ autoconfigure:
+ exclude:
+ - spring.dev.langchain4j.spring.LangChain4jAutoConfig
+ - spring.dev.langchain4j.openai.spring.AutoConfig
+ - spring.dev.langchain4j.ollama.spring.AutoConfig
+ - spring.dev.langchain4j.azure.openai.spring.AutoConfig
+ - spring.dev.langchain4j.azure.aisearch.spring.AutoConfig
+ - spring.dev.langchain4j.anthropic.spring.AutoConfig
mybatis:
mapper-locations: classpath:mappers/custom/*.xml,classpath*:/mappers/*.xml
@@ -36,7 +43,11 @@ logging:
dev.ai4j.openai4j: DEBUG
s2:
+ pyllm:
+ url: http://127.0.0.1:9092
+
parser:
+ url: ${s2.pyllm.url}
strategy: ONE_PASS_SELF_CONSISTENCY
exemplar-recall:
number: 10
@@ -50,6 +61,14 @@ s2:
corrector:
additional:
information: true
+ date: true
+ functionCall:
+ url: ${s2.pyllm.url}
+
+ embedding:
+ url: ${s2.pyllm.url}
+ persistent:
+ path: /tmp
demo:
names: S2VisitsDemo,S2ArtistDemo
@@ -59,24 +78,6 @@ s2:
cache:
enable: false
- langchain4j:
- #1.chat-model
- chat-model:
- provider: open_ai
- openai:
- # Replace with your LLM configs
- # Note: The default API key `demo` is provided by langchain4j community
- # which limits 1000 tokens per request.
- base-url: ${OPENAI_API_BASE:https://api.openai.com/v1}
- api-key: ${OPENAI_API_KEY:demo}
- model-name: ${OPENAI_MODEL_NAME:gpt-3.5-turbo}
- temperature: ${OPENAI_TEMPERATURE:0.0}
- timeout: ${OPENAI_TIMEOUT:PT60S}
- #2.embedding-model
- #2.1 in_memory(default)
- embedding-model:
- provider: in_process
-
# swagger配置
swagger:
title: 'SuperSonic平台接口文档'
@@ -88,4 +89,21 @@ swagger:
name:
email:
url: ''
- version: 3.0
\ No newline at end of file
+ version: 3.0
+
+
+langchain4j:
+ open-ai:
+ chat-model:
+ # Replace with your LLM configs
+ # Note: The default API key `demo` is provided by the langchain4j community
+ # and limits each request to 1000 tokens.
+ base-url: ${OPENAI_API_BASE:https://api.openai.com/v1}
+ api-key: ${OPENAI_API_KEY:demo}
+ model-name: ${OPENAI_MODEL_NAME:gpt-3.5-turbo}
+ temperature: ${OPENAI_TEMPERATURE:0.0}
+ timeout: ${OPENAI_TIMEOUT:PT60S}
+# The embedding-model block below stays commented out: with the public `demo` key,
+# embedding requests fail with dev.ai4j.openai4j.OpenAiHttpException: Too many requests.
+# embedding-model:
+# base-url: ${OPENAI_API_BASE:https://api.openai.com/v1}
+# api-key: ${OPENAI_API_KEY:demo}
\ No newline at end of file
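
The new s2.pyllm.url key above is referenced by placeholder from the other s2 url settings, so the Python LLM service address only needs to be changed in one place. A minimal, hypothetical sketch of how such placeholders resolve when read through Spring (class and field names are illustrative, not from the SuperSonic codebase):

    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.stereotype.Component;

    // Illustration only: s2.parser.url is declared as ${s2.pyllm.url} in the YAML
    // above, so both fields resolve to the same endpoint (http://127.0.0.1:9092
    // unless s2.pyllm.url is overridden).
    @Component
    public class PyLlmEndpointHolder {

        @Value("${s2.pyllm.url}")
        private String pyLlmUrl;

        @Value("${s2.parser.url}")
        private String parserUrl;
    }
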
diff --git a/pom.xml b/pom.xml
index 86896325b..c32c5fdb5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,7 +1,7 @@
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.tencent.supersonic</groupId>
@@ -73,7 +73,7 @@
22.3.0
2.2.6
3.17
-        <langchain4j.version>0.24.0</langchain4j.version>
+        <langchain4j.version>0.31.0</langchain4j.version>
42.7.1
4.0.8
0.10.0
@@ -120,11 +120,6 @@
                <artifactId>langchain4j-core</artifactId>
                <version>${langchain4j.version}</version>
            </dependency>
-            <dependency>
-                <groupId>dev.langchain4j</groupId>
-                <artifactId>langchain4j-spring-boot-starter</artifactId>
-                <version>${langchain4j.version}</version>
-            </dependency>
            <dependency>
                <groupId>dev.langchain4j</groupId>
                <artifactId>langchain4j-open-ai</artifactId>
@@ -160,6 +155,41 @@
                <artifactId>langchain4j-azure-open-ai</artifactId>
                <version>${langchain4j.version}</version>
            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-spring-boot-starter</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-anthropic-spring-boot-starter</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-ollama-spring-boot-starter</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-open-ai-spring-boot-starter</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-azure-ai-search-spring-boot-starter</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-azure-open-ai-spring-boot-starter</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>dev.langchain4j</groupId>
+                <artifactId>langchain4j-embeddings-all-minilm-l6-v2-q</artifactId>
+                <version>${langchain4j.version}</version>
+            </dependency>