From 32e2c1e39d27f94dfb677cfd512cb57f68830329 Mon Sep 17 00:00:00 2001
From: lexluo09 <39718951+lexluo09@users.noreply.github.com>
Date: Sat, 22 Jun 2024 16:59:15 +0800
Subject: [PATCH] (improvement)(common) Fixed the compatibility issue with qwen. (#1193)

---
 .../chat/server/parser/MultiTurnParser.java    | 60 +++++++++----------
 common/pom.xml                                 |  6 ++
 .../parser/llm/OnePassSCSqlGenStrategy.java    |  4 +-
 .../src/main/resources/application-local.yaml  | 11 +++-
 pom.xml                                        |  6 ++
 5 files changed, 53 insertions(+), 34 deletions(-)

diff --git a/chat/server/src/main/java/com/tencent/supersonic/chat/server/parser/MultiTurnParser.java b/chat/server/src/main/java/com/tencent/supersonic/chat/server/parser/MultiTurnParser.java
index a90b727d3..c79201821 100644
--- a/chat/server/src/main/java/com/tencent/supersonic/chat/server/parser/MultiTurnParser.java
+++ b/chat/server/src/main/java/com/tencent/supersonic/chat/server/parser/MultiTurnParser.java
@@ -1,55 +1,54 @@
 package com.tencent.supersonic.chat.server.parser;
 
+import static com.tencent.supersonic.chat.server.parser.ParserConfig.PARSER_MULTI_TURN_ENABLE;
+
 import com.tencent.supersonic.chat.server.agent.MultiTurnConfig;
 import com.tencent.supersonic.chat.server.persistence.repository.ChatQueryRepository;
 import com.tencent.supersonic.chat.server.pojo.ChatParseContext;
 import com.tencent.supersonic.chat.server.util.QueryReqConverter;
-import com.tencent.supersonic.common.util.ContextUtils;
 import com.tencent.supersonic.common.config.LLMConfig;
+import com.tencent.supersonic.common.util.ContextUtils;
+import com.tencent.supersonic.common.util.S2ChatModelProvider;
 import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
 import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
 import com.tencent.supersonic.headless.api.pojo.request.QueryReq;
 import com.tencent.supersonic.headless.api.pojo.response.MapResp;
 import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
-import com.tencent.supersonic.common.util.S2ChatModelProvider;
 import com.tencent.supersonic.headless.server.service.ChatQueryService;
 import dev.langchain4j.data.message.AiMessage;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.input.Prompt;
 import dev.langchain4j.model.input.PromptTemplate;
 import dev.langchain4j.model.output.Response;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
 import lombok.Builder;
 import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.Collections;
-
-import static com.tencent.supersonic.chat.server.parser.ParserConfig.PARSER_MULTI_TURN_ENABLE;
-
 @Slf4j
 public class MultiTurnParser implements ChatParser {
 
     private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
 
     private static final String instruction = ""
-            + "#Role: You are a data product manager experienced in data requirements.\n"
-            + "#Task: Your will be provided with current and history questions asked by a user,"
-            + "along with their mapped schema elements(metric, dimension and value),"
-            + "please try understanding the semantics and rewrite a question.\n"
-            + "#Rules: "
-            + "1.ALWAYS keep relevant entities, metrics, dimensions, values and date ranges. "
-            + "2.ONLY respond with the rewritten question.\n"
-            + "#Current Question: %s\n"
-            + "#Current Mapped Schema: %s\n"
-            + "#History Question: %s\n"
-            + "#History Mapped Schema: %s\n"
-            + "#History SQL: %s\n"
-            + "#Rewritten Question: ";
+            + "#Role: You are a data product manager experienced in data requirements.\n"
+            + "#Task: Your will be provided with current and history questions asked by a user,"
+            + "along with their mapped schema elements(metric, dimension and value),"
+            + "please try understanding the semantics and rewrite a question.\n"
+            + "#Rules: "
+            + "1.ALWAYS keep relevant entities, metrics, dimensions, values and date ranges. "
+            + "2.ONLY respond with the rewritten question.\n"
+            + "#Current Question: %s\n"
+            + "#Current Mapped Schema: %s\n"
+            + "#History Question: %s\n"
+            + "#History Mapped Schema: %s\n"
+            + "#History SQL: %s\n"
+            + "#Rewritten Question: ";
 
     @Override
     public void parse(ChatParseContext chatParseContext, ParseResp parseResp) {
@@ -79,13 +78,13 @@ public class MultiTurnParser implements ChatParser {
         String histMapStr = generateSchemaPrompt(lastParseResult.getSelectedParses().get(0).getElementMatches());
         String histSQL = lastParseResult.getSelectedParses().get(0).getSqlInfo().getCorrectS2SQL();
         String rewrittenQuery = rewriteQuery(RewriteContext.builder()
-                .curtQuestion(currentMapResult.getQueryText())
-                .histQuestion(lastParseResult.getQueryText())
-                .curtSchema(curtMapStr)
-                .histSchema(histMapStr)
-                .histSQL(histSQL)
-                .llmConfig(queryReq.getLlmConfig())
-                .build());
+                .curtQuestion(currentMapResult.getQueryText())
+                .histQuestion(lastParseResult.getQueryText())
+                .curtSchema(curtMapStr)
+                .histSchema(histMapStr)
+                .histSQL(histSQL)
+                .llmConfig(queryReq.getLlmConfig())
+                .build());
         chatParseContext.setQueryText(rewrittenQuery);
         log.info("Last Query: {} Current Query: {}, Rewritten Query: {}",
                 lastParseResult.getQueryText(), currentMapResult.getQueryText(), rewrittenQuery);
@@ -98,7 +97,7 @@ public class MultiTurnParser implements ChatParser {
         keyPipelineLog.info("MultiTurnParser reqPrompt:{}", promptStr);
 
         ChatLanguageModel chatLanguageModel = S2ChatModelProvider.provide(context.getLlmConfig());
-        Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
+        Response<AiMessage> response = chatLanguageModel.generate(prompt.toUserMessage());
         String result = response.content().text();
         keyPipelineLog.info("MultiTurnParser modelResp:{}", result);
 
@@ -144,6 +143,7 @@ public class MultiTurnParser implements ChatParser {
     @Data
     @Builder
     public static class RewriteContext {
+
         private String curtQuestion;
         private String histQuestion;
         private String curtSchema;
diff --git a/common/pom.xml b/common/pom.xml
index afd0affd6..2b51c88b4 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -228,6 +228,12 @@
         <dependency>
             <groupId>dev.langchain4j</groupId>
             <artifactId>langchain4j-dashscope</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-simple</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
diff --git a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java
index 1c73244a7..0f2f49130 100644
--- a/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java
+++ b/headless/chat/src/main/java/com/tencent/supersonic/headless/chat/parser/llm/OnePassSCSqlGenStrategy.java
@@ -39,9 +39,9 @@ public class OnePassSCSqlGenStrategy extends SqlGenStrategy {
         //3.perform multiple self-consistency inferences parallelly
         Map<Prompt, String> prompt2Output = new ConcurrentHashMap<>();
         prompt2Exemplar.keySet().parallelStream().forEach(prompt -> {
-            keyPipelineLog.info("OnePassSCSqlGenStrategy reqPrompt:\n{}", prompt.toSystemMessage());
+            keyPipelineLog.info("OnePassSCSqlGenStrategy reqPrompt:\n{}", prompt.toUserMessage());
             ChatLanguageModel chatLanguageModel = getChatLanguageModel(llmReq.getLlmConfig());
-            Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
+            Response<AiMessage> response = chatLanguageModel.generate(prompt.toUserMessage());
             String result = response.content().text();
             prompt2Output.put(prompt, result);
             keyPipelineLog.info("OnePassSCSqlGenStrategy modelResp:\n{}", result);
diff --git a/launchers/standalone/src/main/resources/application-local.yaml b/launchers/standalone/src/main/resources/application-local.yaml
index 0ba6f37da..b74856229 100644
--- a/launchers/standalone/src/main/resources/application-local.yaml
+++ b/launchers/standalone/src/main/resources/application-local.yaml
@@ -99,7 +99,14 @@ langchain4j:
       model-name: ${OPENAI_MODEL_NAME:gpt-3.5-turbo}
       temperature: ${OPENAI_TEMPERATURE:0.0}
       timeout: ${OPENAI_TIMEOUT:PT60S}
-# java.lang.RuntimeException: dev.ai4j.openai4j.OpenAiHttpException: Too many requests
 #  embedding-model:
 #    base-url: ${OPENAI_API_BASE:https://api.openai.com/v1}
-#    api-key: ${OPENAI_API_KEY:demo}
\ No newline at end of file
+#    api-key: ${OPENAI_API_KEY:demo}
+#  dashscope:
+#    chat-model:
+#      api-key: ${OPENAI_API_KEY:demo}
+#      model-name: qwen-max-1201
+#    embedding-model:
+#      api-key: ${OPENAI_API_KEY:demo}
+
+
diff --git a/pom.xml b/pom.xml
index c0ff526de..80856ba1b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -199,6 +199,12 @@
             <dependency>
                 <groupId>dev.langchain4j</groupId>
                 <artifactId>langchain4j-dashscope</artifactId>
                 <version>${langchain4j.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-simple</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
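
Background on the fix: both parsers previously sent the rendered prompt as a lone system message through prompt.toSystemMessage(). The DashScope/Qwen chat API appears to reject a request that carries only a system message and no user message, so MultiTurnParser and OnePassSCSqlGenStrategy now submit the prompt with prompt.toUserMessage(), which OpenAI-compatible models accept as well. The new slf4j-simple exclusions keep langchain4j-dashscope from pulling a second SLF4J binding in next to the application's own logger, and the commented-out dashscope block in application-local.yaml shows where a Qwen model could be configured. The snippet below is a minimal sketch of the same call pattern, not code from this patch: the class name, the DASHSCOPE_API_KEY environment variable and the toy template are illustrative assumptions, and inside SuperSonic the model is obtained through S2ChatModelProvider.provide(llmConfig) rather than built directly.

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.dashscope.QwenChatModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import java.util.Map;

public class QwenPromptSketch {

    // Toy stand-in for the rewrite instruction; the real template lives in MultiTurnParser.
    private static final PromptTemplate REWRITE_TEMPLATE = PromptTemplate.from(
            "#Task: rewrite the user question so it is self-contained.\n"
                    + "#Current Question: {{question}}\n"
                    + "#Rewritten Question: ");

    public static void main(String[] args) {
        // Assumed direct builder usage of langchain4j-dashscope; SuperSonic resolves the
        // model through S2ChatModelProvider.provide(llmConfig) instead of building it here.
        ChatLanguageModel model = QwenChatModel.builder()
                .apiKey(System.getenv("DASHSCOPE_API_KEY"))
                .modelName("qwen-max-1201")
                .build();

        Prompt prompt = REWRITE_TEMPLATE.apply(Map.of("question", "and how about last month?"));

        // Sending the rendered prompt as a user message is accepted by both Qwen/DashScope
        // and OpenAI-compatible backends; the patch moves away from prompt.toSystemMessage(),
        // which left the request without any user message.
        Response<AiMessage> response = model.generate(prompt.toUserMessage());
        System.out.println(response.content().text());
    }
}

If a deployment still wants an explicit system role, the portable pattern is to send a SystemMessage together with a UserMessage rather than a system message on its own.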