(improvement)(headless) Add localAi auto-configuration. (#1243)

Author: 二毛
Date: 2024-06-27 15:09:18 +08:00
Committed by: GitHub
Parent: d1ec6badce
Commit: 20697efac1
5 changed files with 180 additions and 0 deletions

ChatModelProperties.java
@@ -0,0 +1,21 @@
package dev.langchain4j.localai.spring;

import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
class ChatModelProperties {

    private String baseUrl;
    private String apiKey;
    private String secretKey;
    private Double temperature;
    private Integer maxRetries;
    private Double topP;
    private String modelName;
    private String endpoint;
    private String responseFormat;
    private Double penaltyScore;
    private Boolean logRequests;
    private Boolean logResponses;
}
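
As a side note, here is a minimal binding sketch that is not part of this commit: it shows how Spring Boot's relaxed binding would map kebab-case keys onto the camelCase fields above, using the langchain4j.local-ai prefix declared in the Properties class further down. The sketch class, the property values, and the use of MockEnvironment (which assumes spring-test is on the classpath) are illustrative assumptions only.

// Sketch only, not part of this commit: Spring Boot relaxed binding for ChatModelProperties.
package dev.langchain4j.localai.spring;

import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.mock.env.MockEnvironment;

class ChatModelPropertiesBindingSketch {

    static ChatModelProperties bindExample() {
        // Kebab-case keys such as base-url and model-name bind to the camelCase
        // fields baseUrl and modelName; the values here are made up for illustration.
        MockEnvironment environment = new MockEnvironment()
                .withProperty("langchain4j.local-ai.chat-model.base-url", "http://localhost:8080")
                .withProperty("langchain4j.local-ai.chat-model.model-name", "ggml-gpt4all-j")
                .withProperty("langchain4j.local-ai.chat-model.temperature", "0.2");

        return Binder.get(environment)
                .bind("langchain4j.local-ai.chat-model", ChatModelProperties.class)
                .get();
    }
}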

EmbeddingModelProperties.java
@@ -0,0 +1,19 @@
package dev.langchain4j.localai.spring;

import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
class EmbeddingModelProperties {

    private String baseUrl;
    private String apiKey;
    private String secretKey;
    private Integer maxRetries;
    private String modelName;
    private String endpoint;
    private String user;
    private Boolean logRequests;
    private Boolean logResponses;
}

LanguageModelProperties.java
@@ -0,0 +1,22 @@
package dev.langchain4j.localai.spring;

import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
class LanguageModelProperties {

    private String baseUrl;
    private String apiKey;
    private String secretKey;
    private Double temperature;
    private Integer maxRetries;
    private Integer topK;
    private Double topP;
    private String modelName;
    private String endpoint;
    private Double penaltyScore;
    private Boolean logRequests;
    private Boolean logResponses;
}

LocalAiAutoConfig.java
@@ -0,0 +1,89 @@
package dev.langchain4j.localai.spring;

import dev.langchain4j.model.localai.LocalAiChatModel;
import dev.langchain4j.model.localai.LocalAiEmbeddingModel;
import dev.langchain4j.model.localai.LocalAiLanguageModel;
import dev.langchain4j.model.localai.LocalAiStreamingChatModel;
import dev.langchain4j.model.localai.LocalAiStreamingLanguageModel;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import static dev.langchain4j.localai.spring.Properties.PREFIX;

@Configuration
@EnableConfigurationProperties(Properties.class)
public class LocalAiAutoConfig {

    @Bean
    @ConditionalOnProperty(PREFIX + ".chat-model.base-url")
    LocalAiChatModel localAiChatModel(Properties properties) {
        ChatModelProperties chatModelProperties = properties.getChatModel();
        return LocalAiChatModel.builder()
                .baseUrl(chatModelProperties.getBaseUrl())
                .modelName(chatModelProperties.getModelName())
                .temperature(chatModelProperties.getTemperature())
                .topP(chatModelProperties.getTopP())
                .maxRetries(chatModelProperties.getMaxRetries())
                .logRequests(chatModelProperties.getLogRequests())
                .logResponses(chatModelProperties.getLogResponses())
                .build();
    }

    @Bean
    @ConditionalOnProperty(PREFIX + ".streaming-chat-model.base-url")
    LocalAiStreamingChatModel localAiStreamingChatModel(Properties properties) {
        ChatModelProperties chatModelProperties = properties.getStreamingChatModel();
        return LocalAiStreamingChatModel.builder()
                .temperature(chatModelProperties.getTemperature())
                .topP(chatModelProperties.getTopP())
                .baseUrl(chatModelProperties.getBaseUrl())
                .modelName(chatModelProperties.getModelName())
                .logRequests(chatModelProperties.getLogRequests())
                .logResponses(chatModelProperties.getLogResponses())
                .build();
    }

    @Bean
    @ConditionalOnProperty(PREFIX + ".language-model.base-url")
    LocalAiLanguageModel localAiLanguageModel(Properties properties) {
        LanguageModelProperties languageModelProperties = properties.getLanguageModel();
        return LocalAiLanguageModel.builder()
                .topP(languageModelProperties.getTopP())
                .baseUrl(languageModelProperties.getBaseUrl())
                .modelName(languageModelProperties.getModelName())
                .temperature(languageModelProperties.getTemperature())
                .maxRetries(languageModelProperties.getMaxRetries())
                .logRequests(languageModelProperties.getLogRequests())
                .logResponses(languageModelProperties.getLogResponses())
                .build();
    }

    @Bean
    @ConditionalOnProperty(PREFIX + ".streaming-language-model.base-url")
    LocalAiStreamingLanguageModel localAiStreamingLanguageModel(Properties properties) {
        LanguageModelProperties languageModelProperties = properties.getStreamingLanguageModel();
        return LocalAiStreamingLanguageModel.builder()
                .topP(languageModelProperties.getTopP())
                .baseUrl(languageModelProperties.getBaseUrl())
                .modelName(languageModelProperties.getModelName())
                .temperature(languageModelProperties.getTemperature())
                .logRequests(languageModelProperties.getLogRequests())
                .logResponses(languageModelProperties.getLogResponses())
                .build();
    }

    @Bean
    @ConditionalOnProperty(PREFIX + ".embedding-model.base-url")
    LocalAiEmbeddingModel localAiEmbeddingModel(Properties properties) {
        EmbeddingModelProperties embeddingModelProperties = properties.getEmbeddingModel();
        return LocalAiEmbeddingModel.builder()
                .baseUrl(embeddingModelProperties.getBaseUrl())
                .modelName(embeddingModelProperties.getModelName())
                .maxRetries(embeddingModelProperties.getMaxRetries())
                .logRequests(embeddingModelProperties.getLogRequests())
                .logResponses(embeddingModelProperties.getLogResponses())
                .build();
    }
}
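
Below is a hedged test sketch, not included in this commit, showing how the conditional wiring above could be exercised with Spring Boot's ApplicationContextRunner: each bean is only registered when the matching base-url property is present. The test class name, the example base URL, and the model name are assumptions made for illustration.

// Hypothetical test sketch, not part of this commit.
package dev.langchain4j.localai.spring;

import dev.langchain4j.model.localai.LocalAiChatModel;
import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;

import static org.assertj.core.api.Assertions.assertThat;

class LocalAiAutoConfigSketchTest {

    ApplicationContextRunner contextRunner = new ApplicationContextRunner()
            .withConfiguration(AutoConfigurations.of(LocalAiAutoConfig.class));

    @Test
    void createsChatModelOnlyWhenBaseUrlIsSet() {
        // Without langchain4j.local-ai.chat-model.base-url, no bean is created.
        contextRunner.run(context -> assertThat(context).doesNotHaveBean(LocalAiChatModel.class));

        // With the property set, @ConditionalOnProperty lets the auto-configuration kick in.
        contextRunner
                .withPropertyValues(
                        "langchain4j.local-ai.chat-model.base-url=http://localhost:8080",
                        "langchain4j.local-ai.chat-model.model-name=ggml-gpt4all-j")
                .run(context -> assertThat(context).hasSingleBean(LocalAiChatModel.class));
    }
}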

Properties.java
@@ -0,0 +1,29 @@
package dev.langchain4j.localai.spring;

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.NestedConfigurationProperty;

@Getter
@Setter
@ConfigurationProperties(prefix = Properties.PREFIX)
public class Properties {

    static final String PREFIX = "langchain4j.local-ai";

    @NestedConfigurationProperty
    ChatModelProperties chatModel;

    @NestedConfigurationProperty
    ChatModelProperties streamingChatModel;

    @NestedConfigurationProperty
    LanguageModelProperties languageModel;

    @NestedConfigurationProperty
    LanguageModelProperties streamingLanguageModel;

    @NestedConfigurationProperty
    EmbeddingModelProperties embeddingModel;
}
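
Finally, a minimal usage sketch that is not part of this commit: once langchain4j.local-ai.chat-model.base-url is set, the auto-configured LocalAiChatModel can be injected like any other Spring bean. The service class and its method are illustrative assumptions.

// Hypothetical usage sketch, not part of this commit.
package dev.langchain4j.localai.spring;

import dev.langchain4j.model.localai.LocalAiChatModel;
import org.springframework.stereotype.Service;

@Service
class AssistantServiceSketch {

    private final LocalAiChatModel chatModel;

    AssistantServiceSketch(LocalAiChatModel chatModel) {
        this.chatModel = chatModel;
    }

    String chat(String userMessage) {
        // Delegates to the bean created by LocalAiAutoConfig.localAiChatModel(...).
        return chatModel.generate(userMessage);
    }
}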