From acffc03c798ab4d1f0f4b6411a5e3a968a3061aa Mon Sep 17 00:00:00 2001
From: beat4ocean <105700600+beat4ocean@users.noreply.github.com>
Date: Mon, 5 May 2025 15:47:51 +0800
Subject: [PATCH] [Improvement][headless] Replace deprecated LangChain4j APIs
 (#2235)

---
 .../dev/langchain4j/model/dify/DifyAiChatModel.java | 12 +++++-------
 .../langchain4j/model/openai/OpenAiChatModel.java   |  6 +++---
 2 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/common/src/main/java/dev/langchain4j/model/dify/DifyAiChatModel.java b/common/src/main/java/dev/langchain4j/model/dify/DifyAiChatModel.java
index 3660df811..95dce7449 100644
--- a/common/src/main/java/dev/langchain4j/model/dify/DifyAiChatModel.java
+++ b/common/src/main/java/dev/langchain4j/model/dify/DifyAiChatModel.java
@@ -9,6 +9,7 @@ import dev.langchain4j.data.message.ChatMessage;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.output.Response;
 import lombok.Builder;
+import lombok.Setter;
 
 import java.util.List;
 
@@ -32,6 +33,7 @@ public class DifyAiChatModel implements ChatLanguageModel {
     private final Double temperature;
     private final Long timeOut;
 
+    @Setter
     private String userName;
 
     @Builder
@@ -54,7 +56,7 @@ public class DifyAiChatModel implements ChatLanguageModel {
     @Override
     public String generate(String message) {
         DifyResult difyResult = this.difyClient.generate(message, this.getUserName());
-        return difyResult.getAnswer().toString();
+        return difyResult.getAnswer();
     }
 
     @Override
@@ -67,7 +69,7 @@ public class DifyAiChatModel implements ChatLanguageModel {
             List<ToolSpecification> toolSpecifications) {
         ensureNotEmpty(messages, "messages");
         DifyResult difyResult =
-                this.difyClient.generate(messages.get(0).text(), this.getUserName());
+                this.difyClient.generate(messages.get(0).toString(), this.getUserName());
         System.out.println(difyResult.toString());
 
         if (!isNullOrEmpty(toolSpecifications)) {
@@ -84,12 +86,8 @@ public class DifyAiChatModel implements ChatLanguageModel {
                 toolSpecification != null ? singletonList(toolSpecification) : null);
     }
 
-    public void setUserName(String userName) {
-        this.userName = userName;
-    }
-
     public String getUserName() {
-        return null == userName ? "zhaodongsheng" : userName;
+        return null == userName ? "admin" : userName;
     }
 }
 
diff --git a/common/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java b/common/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java
index 2a3a2b9bc..aed877711 100644
--- a/common/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java
+++ b/common/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java
@@ -52,7 +52,7 @@ import static dev.langchain4j.model.openai.InternalOpenAiHelper.toOpenAiMessages
 import static dev.langchain4j.model.openai.InternalOpenAiHelper.toOpenAiResponseFormat;
 import static dev.langchain4j.model.openai.InternalOpenAiHelper.toTools;
 import static dev.langchain4j.model.openai.InternalOpenAiHelper.tokenUsageFrom;
-import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
+import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO;
 import static dev.langchain4j.spi.ServiceHelper.loadFactories;
 import static java.time.Duration.ofSeconds;
 import static java.util.Collections.emptyList;
@@ -111,7 +111,7 @@ public class OpenAiChatModel implements ChatLanguageModel, TokenCountEstimator {
                 .connectTimeout(timeout).readTimeout(timeout).writeTimeout(timeout).proxy(proxy)
                 .logRequests(logRequests).logResponses(logResponses).userAgent(DEFAULT_USER_AGENT)
                 .customHeaders(customHeaders).build();
-        this.modelName = getOrDefault(modelName, GPT_3_5_TURBO);
+        this.modelName = getOrDefault(modelName, GPT_3_5_TURBO.name());
        this.apiVersion = apiVersion;
         this.temperature = getOrDefault(temperature, 0.7);
         this.topP = topP;
@@ -130,7 +130,7 @@ public class OpenAiChatModel implements ChatLanguageModel, TokenCountEstimator {
         this.strictTools = getOrDefault(strictTools, false);
         this.parallelToolCalls = parallelToolCalls;
         this.maxRetries = getOrDefault(maxRetries, 3);
-        this.tokenizer = getOrDefault(tokenizer, OpenAiTokenizer::new);
+        this.tokenizer = getOrDefault(tokenizer, () -> new OpenAiTokenizer(this.modelName));
         this.listeners = listeners == null ? emptyList() : new ArrayList<>(listeners);
     }