mirror of
https://github.com/tencentmusic/supersonic.git
synced 2025-12-10 11:07:06 +00:00
(improvement)(Headless) The Python module supports reading the LLM configuration from the agent settings (#1067)
Co-authored-by: jolunoluo
This commit is contained in:
@@ -6,6 +6,12 @@ import com.tencent.supersonic.headless.core.chat.knowledge.HanlpMapResult;
|
||||
import com.tencent.supersonic.headless.core.chat.knowledge.KnowledgeBaseService;
|
||||
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
|
||||
import com.tencent.supersonic.headless.core.pojo.QueryContext;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.collections.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashSet;
|
||||
@@ -14,11 +20,6 @@ import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.collections.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
/**
|
||||
* HanlpDictMatchStrategy uses <a href="https://www.hanlp.com/">HanLP</a> to
|
||||
|
||||
@@ -33,7 +33,7 @@ public class QueryStatement {
|
||||
private SemanticModel semanticModel;
|
||||
|
||||
private SemanticSchemaResp semanticSchemaResp;
|
||||
private Integer limit;
|
||||
private Integer limit = 1000;
|
||||
|
||||
public boolean isOk() {
|
||||
this.ok = "".equals(errMsg) && !"".equals(sql);
|
||||
|
||||
@@ -12,10 +12,15 @@ from config.config_parse import LLM_PROVIDER_NAME, llm_config_dict
|
||||
def get_llm(llm_config: dict):
    """Build an LLM instance for the configured provider.

    Prefers the per-agent settings in ``llm_config``; falls back to the
    global ``llm_config_dict`` when no usable agent config (in particular,
    no ``baseUrl``) is supplied.

    :param llm_config: agent-level LLM settings with keys ``modelName``,
        ``baseUrl``, ``apiKey`` and ``temperature``; may be ``None``.
    :return: an LLM instance created by the provider class registered in
        ``llms.type_to_cls_dict`` under ``LLM_PROVIDER_NAME``.
    :raises Exception: if ``LLM_PROVIDER_NAME`` is not a registered provider.
    """
    if LLM_PROVIDER_NAME not in llms.type_to_cls_dict:
        raise Exception("llm_provider_name is not supported: {}".format(LLM_PROVIDER_NAME))
    llm_provider = llms.type_to_cls_dict[LLM_PROVIDER_NAME]
    # Fall back to the global config when the agent supplies no base URL.
    # .get() also covers a missing "baseUrl" key, which the previous
    # subscript check would have turned into a KeyError.
    if llm_config is None or not llm_config.get("baseUrl"):
        return llm_provider(**llm_config_dict)
    # Map the agent's camelCase keys onto the provider's expected kwargs.
    openai_llm_config = {
        "model_name": llm_config["modelName"],
        "openai_api_base": llm_config["baseUrl"],
        "openai_api_key": llm_config["apiKey"],
        "temperature": llm_config["temperature"],
    }
    return llm_provider(**openai_llm_config)
|
||||
Reference in New Issue
Block a user