(improvement)(Headless) The Python module supports reading the LLM information from the agent configuration (#1067)

Co-authored-by: jolunoluo
LXW authored 2024-05-31 21:20:15 +08:00 · committed by GitHub
parent c25dbc2cef
commit f0f57d09eb
3 changed files with 14 additions and 8 deletions

@@ -12,10 +12,15 @@ from config.config_parse import LLM_PROVIDER_NAME, llm_config_dict
 def get_llm(llm_config: dict):
     if LLM_PROVIDER_NAME in llms.type_to_cls_dict:
         llm_provider = llms.type_to_cls_dict[LLM_PROVIDER_NAME]
-        if llm_config is None:
+        if llm_config is None or llm_config["baseUrl"] is None or llm_config["baseUrl"] == '':
             llm = llm_provider(**llm_config_dict)
         else:
-            llm = llm_provider(**llm_config)
+            openai_llm_config = {}
+            openai_llm_config["model_name"] = llm_config["modelName"]
+            openai_llm_config["openai_api_base"] = llm_config["baseUrl"]
+            openai_llm_config["openai_api_key"] = llm_config["apiKey"]
+            openai_llm_config["temperature"] = llm_config["temperature"]
+            llm = llm_provider(**openai_llm_config)
         return llm
     else:
         raise Exception("llm_provider_name is not supported: {}".format(LLM_PROVIDER_NAME))
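
A minimal usage sketch of the updated get_llm. The per-agent keys (modelName, baseUrl, apiKey, temperature) mirror the diff above; the import path, endpoint, and key values are illustrative placeholders, not part of this commit.

# Hypothetical import; the actual module path of get_llm is not shown in this diff.
from get_llm_module import get_llm

# Per-agent LLM settings as read from the agent configuration (placeholder values).
agent_llm_config = {
    "modelName": "gpt-3.5-turbo",
    "baseUrl": "https://api.openai.com/v1",
    "apiKey": "sk-xxxx",
    "temperature": 0.0,
}

# With a non-empty baseUrl, the per-agent settings are mapped onto the provider's
# model_name / openai_api_base / openai_api_key / temperature keyword arguments;
# if baseUrl is None or empty, get_llm falls back to the global llm_config_dict
# from config.config_parse.
llm = get_llm(agent_llm_config)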