diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/chat/mapper/HanlpDictMatchStrategy.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/chat/mapper/HanlpDictMatchStrategy.java
index 20171fc0c..7635b6ff7 100644
--- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/chat/mapper/HanlpDictMatchStrategy.java
+++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/chat/mapper/HanlpDictMatchStrategy.java
@@ -6,6 +6,12 @@ import com.tencent.supersonic.headless.core.chat.knowledge.HanlpMapResult;
 import com.tencent.supersonic.headless.core.chat.knowledge.KnowledgeBaseService;
 import com.tencent.supersonic.headless.core.config.OptimizationConfig;
 import com.tencent.supersonic.headless.core.pojo.QueryContext;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashSet;
@@ -14,11 +20,6 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
 
 /**
  * HanlpDictMatchStrategy uses HanLP to
diff --git a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java
index ac1757c4b..491a44bc6 100644
--- a/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java
+++ b/headless/core/src/main/java/com/tencent/supersonic/headless/core/pojo/QueryStatement.java
@@ -33,7 +33,7 @@ public class QueryStatement {
     private SemanticModel semanticModel;
     private SemanticSchemaResp semanticSchemaResp;
 
-    private Integer limit;
+    private Integer limit = 1000;
 
     public boolean isOk() {
         this.ok = "".equals(errMsg) && !"".equals(sql);
diff --git a/headless/python/instances/llm_instance.py b/headless/python/instances/llm_instance.py
index 9cdeb10f4..ad7b0d886 100644
--- a/headless/python/instances/llm_instance.py
+++ b/headless/python/instances/llm_instance.py
@@ -12,10 +12,15 @@ from config.config_parse import LLM_PROVIDER_NAME, llm_config_dict
 def get_llm(llm_config: dict):
     if LLM_PROVIDER_NAME in llms.type_to_cls_dict:
         llm_provider = llms.type_to_cls_dict[LLM_PROVIDER_NAME]
-        if llm_config is None:
+        if llm_config is None or llm_config["baseUrl"] is None or llm_config["baseUrl"] == '':
             llm = llm_provider(**llm_config_dict)
         else:
-            llm = llm_provider(**llm_config)
+            openai_llm_config = {}
+            openai_llm_config["model_name"] = llm_config["modelName"]
+            openai_llm_config["openai_api_base"] = llm_config["baseUrl"]
+            openai_llm_config["openai_api_key"] = llm_config["apiKey"]
+            openai_llm_config["temperature"] = llm_config["temperature"]
+            llm = llm_provider(**openai_llm_config)
         return llm
     else:
         raise Exception("llm_provider_name is not supported: {}".format(LLM_PROVIDER_NAME))
\ No newline at end of file
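Note on the llm_instance.py change: below is a minimal, hypothetical sketch of the camelCase-to-kwarg mapping that the patched get_llm performs before constructing the OpenAI-compatible provider. The helper name build_openai_kwargs and the example values are illustrative only and not part of the patch; the camelCase keys (modelName, baseUrl, apiKey, temperature) are assumed to match what the Java side sends.

# Illustrative only: mirrors the key mapping added in get_llm, without importing
# langchain. Not part of the patch; the camelCase keys are assumed inputs.
def build_openai_kwargs(llm_config: dict) -> dict:
    return {
        "model_name": llm_config["modelName"],
        "openai_api_base": llm_config["baseUrl"],
        "openai_api_key": llm_config["apiKey"],
        "temperature": llm_config["temperature"],
    }


if __name__ == "__main__":
    # Hypothetical config as it might arrive from the Java service.
    example = {
        "modelName": "gpt-3.5-turbo",
        "baseUrl": "https://api.openai.com/v1",
        "apiKey": "sk-placeholder",
        "temperature": 0.0,
    }
    print(build_openai_kwargs(example))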