feat:langchain4j接入

This commit is contained in:
xiang
2026-03-12 23:58:02 +08:00
parent ba859299a4
commit 06d5b6ca63
24 changed files with 569 additions and 128 deletions

79
pom.xml
View File

@@ -36,6 +36,7 @@
<rocketmq.version>2.2.3</rocketmq.version>
<spring.authorization.server.version>0.4.0</spring.authorization.server.version>
<springai.alibaba.version>1.0.0.2</springai.alibaba.version>
<langchain.version>1.12.2</langchain.version>
</properties>
<repositories>
@@ -91,41 +92,30 @@
<type>pom</type>
<scope>import</scope>
</dependency>
<!-- Spring AI -->
<!-- <dependency>-->
<!-- <groupId>org.springframework.ai</groupId>-->
<!-- <artifactId>spring-ai-bom</artifactId>-->
<!-- <version>${spring.ai.version}</version>-->
<!-- <type>pom</type>-->
<!-- <scope>import</scope>-->
<!-- </dependency>-->
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-bom</artifactId>
<type>pom</type>
<scope>import</scope>
<version>${springai.alibaba.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<!-- Source: https://mvnrepository.com/artifact/dev.langchain4j/langchain4j-open-ai -->
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-starter-dashscope</artifactId>
<version>${springai.alibaba.version}</version>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai</artifactId>
<version>${langchain.version}</version>
<scope>compile</scope>
</dependency>
<!-- Source: https://mvnrepository.com/artifact/dev.langchain4j/langchain4j -->
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-graph-core</artifactId>
<version>${springai.alibaba.version}</version>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j</artifactId>
<version>${langchain.version}</version>
<scope>compile</scope>
</dependency>
<!-- Source: https://mvnrepository.com/artifact/dev.langchain4j/langchain4j-ollama -->
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-starter-memory-jdbc</artifactId>
<version>${springai.alibaba.version}</version>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-ollama</artifactId>
<version>${langchain.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
@@ -134,27 +124,22 @@
<version>${mysql.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.springframework.ai</groupId>-->
<!-- <artifactId>spring-ai-pdf-document-reader</artifactId>-->
<!-- <version>${spring.ai.version}</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.springframework.ai</groupId>-->
<!-- <artifactId>spring-ai-pgvector-store</artifactId>-->
<!-- <version>${spring.ai.version}</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.springframework.ai</groupId>-->
<!-- <artifactId>spring-ai-pgvector-store-spring-boot-starter</artifactId>-->
<!-- <version>${spring.ai.version}</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.springframework.ai</groupId>-->
<!-- <artifactId>spring-ai-tika-document-reader</artifactId>-->
<!-- <version>${spring.ai.version}</version>-->
<!-- </dependency>-->
<!-- collections -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>32.1.3-jre</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>4.2</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.15.0</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>

View File

@@ -3,7 +3,10 @@ package com.xiang;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.ConfigurationPropertiesScan;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.env.Environment;

View File

@@ -1,40 +1,22 @@
package com.xiang.xsa.xservice.ai.server;
import com.xiang.xservice.ai.agent.BaseAgent;
import com.xiang.xservice.ai.core.enums.ModelTypeEnum;
import lombok.RequiredArgsConstructor;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.ChatMemory;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
// NOTE(review): this span is a version-control diff rendering of ChatController — the
// old Spring AI implementation and the new langchain4j-based one are interleaved here
// without +/- markers (e.g. chatDemo carries BOTH @PostMapping and @GetMapping and has
// two return statements). It is not compilable as-is; comments below only annotate
// what is visible. Resolve against the actual repository before editing.
@RestController
@RequiredArgsConstructor
public class ChatController {

    private final ChatClient chatClient;   // old Spring AI client — removed in the new version
    private final ChatMemory chatMemory;   // old Spring AI memory — removed in the new version
    private final BaseAgent baseAgent;     // new langchain4j-backed agent

    // Old mapping (POST) and new mapping (GET) both present in the diff.
    @PostMapping("/chat")
    @GetMapping("/chat")
    public String chatDemo(@RequestParam("question") String question) {
        // Old body: synchronous Spring AI call.
        return chatClient.prompt(question).call().content();
        // New body: fire streaming chat via the agent; reply is emitted by a handler,
        // hence the placeholder return value.
        baseAgent.chat(ModelTypeEnum.OPEN_AI, question);
        return "321";
    }

    // Removed in the new version: streaming endpoint backed by Spring AI Flux.
    @PostMapping(value = "/chatWithFlux")
    public Flux<String> chatWithFlux(@RequestParam("question") String question) {
        return chatClient.prompt(question).stream().content();
    }

    // Removed in the new version: session-scoped memory via MessageChatMemoryAdvisor.
    @PostMapping("/chatWithConversionId")
    public String chatWithConversionId(@RequestParam("sessionId") String sessionId, @RequestParam("question") String question) {
        return chatClient.prompt()
                .user(question)
                .advisors(
                        MessageChatMemoryAdvisor.builder(chatMemory).conversationId(sessionId).build(), new SimpleLoggerAdvisor()
                )
                .call().content();
    }
}

View File

@@ -1,16 +1,15 @@
spring:
datasource:
url: jdbc:mysql://rm-bp15t34gqx62jm069ro.mysql.rds.aliyuncs.com:3306/xservice-ai-center?useUnicode=true&characterEncoding=utf-8&serverTimezone=Asia/Shanghai&allowMultiQueries=true
username: root
password: xb#UWqnhH24&XpX
driver-class-name: com.mysql.cj.jdbc.Driver
ai:
dashscope:
api-key: sk-07353fd191074c9c930b134230ba88ea
chat:
options:
model: qwen-plus
http:
connect-timeout: 30s
read-timeout: 60s
ai:
openai:
configs:
bai-lian:
apiKey: sk-70cb426d7d1e4b54b4ffe71022e7d815
modelName: qwen3.5-plus
baseUrl: https://dashscope.aliyuncs.com/compatible-mode/v1

View File

@@ -0,0 +1,7 @@
package com.xiang.xservice.ai.agent;
import com.xiang.xservice.ai.core.enums.ModelTypeEnum;
/**
 * Minimal chat-agent contract: sends one user message to a model of the requested type.
 */
public interface BaseAgent {

    /**
     * Starts a chat completion for {@code message} against a model of {@code modelType}.
     * Returns nothing — the reply is presumably delivered through a streaming handler
     * (see SimpleChatAgent); confirm with implementations.
     *
     * @param modelType which backend family to use (OPEN_AI, OLLAMA, ...)
     * @param message   the raw user message
     */
    void chat(ModelTypeEnum modelType, String message);
}

View File

@@ -0,0 +1,42 @@
package com.xiang.xservice.ai.agent;
import com.google.common.collect.Lists;
import com.xiang.xservice.ai.config.OpenAIBaseConfig;
import com.xiang.xservice.ai.config.OpenAIConfig;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.enums.ModelTypeEnum;
import com.xiang.xservice.ai.core.handler.MyStreamingHandler;
import com.xiang.xservice.ai.core.route.TaskRouter;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
/**
 * Default {@link BaseAgent}: resolves the "bai-lian" endpoint settings, routes to a
 * streaming chat model by task type + provider, and streams the reply through
 * {@link MyStreamingHandler}.
 */
@Service
@RequiredArgsConstructor
public class SimpleChatAgent implements BaseAgent {

    /** Config key under ai.openai.configs; was hard-coded inline in the original. */
    private static final String DEFAULT_CONFIG_KEY = "bai-lian";

    /** System prompt (kept verbatim: "you are a chat AI assistant named Lobster"). */
    private static final String SYSTEM_PROMPT = "你是一个能够与人聊天的AI智能助手名字叫龙虾";

    private final TaskRouter router;
    private final OpenAIBaseConfig openAIBaseConfig;
    private final MyStreamingHandler streamingHandler;

    /**
     * Streams a chat completion for {@code message}; output goes to the handler, not the caller.
     *
     * @throws IllegalStateException if the "bai-lian" configuration entry is missing
     */
    @Override
    public void chat(ModelTypeEnum modelType, String message) {
        // Fail fast with a clear message instead of an opaque NPE on the getters below
        // when the configuration entry is absent (bug in original).
        OpenAIConfig openAIConfig = openAIBaseConfig.getConfigs() == null
                ? null
                : openAIBaseConfig.getConfigs().get(DEFAULT_CONFIG_KEY);
        if (openAIConfig == null) {
            throw new IllegalStateException("Missing ai.openai.configs entry: " + DEFAULT_CONFIG_KEY);
        }
        ModelConfig modelConfig = ModelConfig.builder()
                .baseUrl(openAIConfig.getBaseUrl())
                .apiKey(openAIConfig.getApiKey())
                .maxRetries(openAIConfig.getMaxRetries())
                .modelName(openAIConfig.getModelName())
                .temperature(openAIConfig.getTemperature())
                .build();
        // First-level route by task type (CHAT), second-level by provider name.
        StreamingChatModel chat = router.route(ModelStrategyEnum.CHAT, modelType.getModelType(), modelConfig);
        UserMessage userMessage = new UserMessage(message);
        SystemMessage systemMessage = new SystemMessage(SYSTEM_PROMPT);
        // NOTE(review): streamingHandler is a shared singleton — concurrent chats will
        // interleave in its buffer; consider a per-request handler instance.
        chat.chat(Lists.newArrayList(systemMessage, userMessage), streamingHandler);
    }
}

View File

@@ -1,46 +0,0 @@
package com.xiang.xservice.ai.config;
import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatOptions;
import com.alibaba.cloud.ai.memory.jdbc.MysqlChatMemoryRepository;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.ChatMemory;
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
/**
 * Initializes the LLM client beans (original comment: "初始化llm客户端").
 * NOTE(review): this class is DELETED by this commit (Spring AI wiring replaced by
 * the langchain4j provider/router stack); annotated here for the record only.
 */
@Configuration
public class LlmClientConfig {

    // Default system prompt: "You are a friendly AI assistant that answers users' questions."
    private static final String DEFAULT_PROMPT = "你是一个友好的AI助手可以根据用户提出的问题进行解答";

    /**
     * Builds the application-wide ChatClient with logging + message-memory advisors
     * and a DashScope temperature of 1.2.
     * NOTE(review): parameter name "charClientBuilder" and method name "initChatCLient"
     * look like typos for "chatClientBuilder" / "initChatClient".
     */
    @Bean
    public ChatClient initChatCLient(ChatClient.Builder charClientBuilder, ChatMemory chatMemory) {
        return charClientBuilder
                .defaultSystem(DEFAULT_PROMPT)
                .defaultAdvisors(
                        new SimpleLoggerAdvisor(),
                        MessageChatMemoryAdvisor.builder(chatMemory).build()
                )
                .defaultOptions(
                        DashScopeChatOptions.builder().withTemperature(1.2).build()
                ).build();
    }

    /**
     * Sliding-window chat memory (last 100 messages) persisted via MySQL through JdbcTemplate.
     */
    @Bean
    public ChatMemory initChatMemory(JdbcTemplate jdbcTemplate) {
        return MessageWindowChatMemory.builder()
                .chatMemoryRepository(MysqlChatMemoryRepository.mysqlBuilder()
                        .jdbcTemplate(jdbcTemplate)
                        .build())
                .maxMessages(100)
                .build();
    }
}

View File

@@ -0,0 +1,18 @@
package com.xiang.xservice.ai.config;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import java.util.Map;
/**
 * Binds the "ai.openai" configuration tree. Each entry under "configs" is a named
 * OpenAI-compatible endpoint (e.g. "bai-lian" in application.yml) with its own
 * key, model name and base URL.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@Configuration
@ConfigurationProperties(prefix = "ai.openai")
public class OpenAIBaseConfig {

    // Logical endpoint name -> connection/model settings for that endpoint.
    private Map<String, OpenAIConfig> configs;
}

View File

@@ -0,0 +1,40 @@
package com.xiang.xservice.ai.config;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Connection and model settings for one OpenAI-compatible endpoint, bound as a value
 * of {@code ai.openai.configs.*} (see OpenAIBaseConfig).
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class OpenAIConfig {
    /**
     * Base URL of the endpoint.
     */
    private String baseUrl;
    /**
     * API key.
     */
    private String apiKey;
    /**
     * Model name.
     */
    private String modelName;
    /**
     * Maximum number of retries.
     */
    private Integer maxRetries;
    /**
     * Token limit.
     * NOTE(review): named "maxToken" here but "maxTokens" in ModelConfig — confirm intended.
     */
    private Integer maxToken;
    /**
     * Sampling temperature.
     */
    private Double temperature;
}

View File

@@ -0,0 +1,42 @@
package com.xiang.xservice.ai.core.entity;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Runtime parameters handed to a provider when building a chat model
 * (see BaseProvider#build). Mirrors OpenAIConfig but is provider-agnostic.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class ModelConfig {
    /**
     * Base URL of the endpoint.
     */
    private String baseUrl;
    /**
     * API key.
     */
    private String apiKey;
    /**
     * Model name.
     */
    private String modelName;
    /**
     * Maximum number of retries.
     */
    private Integer maxRetries;
    /**
     * Token limit.
     */
    private Integer maxTokens;
    /**
     * Sampling temperature.
     */
    private Double temperature;
}

View File

@@ -0,0 +1,17 @@
package com.xiang.xservice.ai.core.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Task categories used for first-level routing (see TaskRouter).
 */
@Getter
@AllArgsConstructor
public enum ModelStrategyEnum {
    // "聊天模型" = chat model
    CHAT("CHAT_MODEL", "聊天模型"),
    // "逻辑推理模型" = logical-reasoning model
    REASON("REASON_MODEL", "逻辑推理模型"),
    // "代码模型" = code model
    CODE("CODE_MODEL", "代码模型"),
    ;
    // Strategy key matched against BaseStrategy#modelType().
    private final String name;
    // Human-readable description (Chinese).
    private final String desc;
}

View File

@@ -0,0 +1,16 @@
package com.xiang.xservice.ai.core.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Backend families used for second-level (provider) routing; modelType must match
 * a BaseProvider#providerName().
 */
@Getter
@AllArgsConstructor
public enum ModelTypeEnum {
    // "cloud-hosted OpenAI-compatible model"
    OPEN_AI("open_ai", "云端部署的支持Open-AI的模型"),
    // "locally deployed Ollama model"
    OLLAMA("ollama", "本地部署的Ollama模型"),
    ;
    // Provider registration key.
    private final String modelType;
    // Human-readable description (Chinese).
    private final String description;
}

View File

@@ -0,0 +1,30 @@
package com.xiang.xservice.ai.core.handler;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import org.checkerframework.checker.units.qual.C;
import org.springframework.stereotype.Component;
/**
 * Console-echoing streaming handler: buffers partial tokens and prints them as they
 * arrive, then logs the final text once the response completes.
 */
@Component
public class MyStreamingHandler implements StreamingChatResponseHandler {

    /**
     * Accumulates partial tokens of the in-flight response.
     * NOTE(review): this bean is a Spring singleton — concurrent streams share (and
     * interleave in) this buffer; a per-request handler instance would be safer.
     */
    private final StringBuilder builder = new StringBuilder();

    /** Called per generated token/chunk: buffer it and echo it in real time. */
    @Override
    public void onPartialResponse(String partialResponse) {
        builder.append(partialResponse);
        System.out.print(partialResponse);
    }

    /** Called once the full response is available: print it and clear the buffer. */
    @Override
    public void onCompleteResponse(ChatResponse completeResponse) {
        System.out.println("\n生成完成");
        System.out.println("最终输出: " + completeResponse.aiMessage().text());
        // Reset so the singleton does not grow without bound across requests
        // (the original never cleared it).
        builder.setLength(0);
    }

    /** Called on stream failure: report it and clear the buffer for the next request. */
    @Override
    public void onError(Throwable error) {
        error.printStackTrace();
        builder.setLength(0);
    }
}

View File

@@ -0,0 +1,28 @@
package com.xiang.xservice.ai.core.provider;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.enums.ModelTypeEnum;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
/**
 * A model backend (OpenAI-compatible, Ollama, ...) that can construct streaming chat
 * models and declare which task types it serves. Discovered via Spring and indexed
 * by name in ModelRouter.
 */
public interface BaseProvider {
    /**
     * Registration key for this provider; must match a ModelTypeEnum#getModelType().
     * @return unique provider name
     */
    String providerName();
    /**
     * Creates a streaming chat model from the given settings.
     * @param config model configuration (URL, key, model name, ...)
     * @return a ready-to-use streaming chat model
     */
    StreamingChatModel build(ModelConfig config);
    /**
     * Whether this provider is suitable for the given task type.
     * @param taskType task category (CHAT / REASON / CODE)
     * @return true if supported, false otherwise
     */
    boolean support(ModelStrategyEnum taskType);
}

View File

@@ -0,0 +1,34 @@
package com.xiang.xservice.ai.core.provider;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.enums.ModelTypeEnum;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import org.springframework.stereotype.Component;
/**
 * Provider for locally hosted Ollama models.
 */
@Component
public class OllamaLlmProvider implements BaseProvider {

    /** Registration key the router uses to look this provider up. */
    @Override
    public String providerName() {
        return ModelTypeEnum.OLLAMA.getModelType();
    }

    /** Assembles a streaming chat model pointed at the configured Ollama server. */
    @Override
    public StreamingChatModel build(ModelConfig config) {
        var model = OllamaStreamingChatModel.builder();
        model.baseUrl(config.getBaseUrl());
        model.modelName(config.getModelName());
        model.temperature(config.getTemperature());
        return model.build();
    }

    /** Ollama models are treated as suitable for every task type. */
    @Override
    public boolean support(ModelStrategyEnum taskType) {
        return true;
    }
}

View File

@@ -0,0 +1,37 @@
package com.xiang.xservice.ai.core.provider;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.enums.ModelTypeEnum;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import org.springframework.stereotype.Component;
/**
 * Provider for cloud-hosted, OpenAI-compatible endpoints.
 */
@Component
public class OpenAILlmProvider implements BaseProvider {

    /** Registration key the router uses to look this provider up. */
    @Override
    public String providerName() {
        return ModelTypeEnum.OPEN_AI.getModelType();
    }

    /** Assembles an OpenAI-compatible streaming chat model from the supplied settings. */
    @Override
    public StreamingChatModel build(ModelConfig config) {
        var model = OpenAiStreamingChatModel.builder();
        model.baseUrl(config.getBaseUrl());
        model.apiKey(config.getApiKey());
        model.modelName(config.getModelName());
        model.temperature(config.getTemperature());
        model.maxTokens(config.getMaxTokens());
        return model.build();
    }

    /** This provider accepts every task type. */
    @Override
    public boolean support(ModelStrategyEnum taskType) {
        return true;
    }
}

View File

@@ -0,0 +1,49 @@
package com.xiang.xservice.ai.core.route;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.provider.BaseProvider;
import com.xiang.xservice.ai.core.strategy.BaseStrategy;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Second-level router: maps a provider name to its {@link BaseProvider} and builds
 * models from it. Providers are discovered via Spring injection at startup.
 */
@Component
public class ModelRouter {

    /** Index of providers keyed by {@link BaseProvider#providerName()}. */
    private final Map<String, BaseProvider> providerMap;

    public ModelRouter(List<BaseProvider> providers) {
        // Collectors.toMap throws IllegalStateException on duplicate provider names,
        // which is the desired fail-fast behavior at startup.
        this.providerMap = providers.stream()
                .collect(Collectors.toMap(
                        BaseProvider::providerName,
                        p -> p
                ));
    }

    /**
     * Selects the provider registered under {@code providerName} and builds a model
     * from {@code config}.
     *
     * @throws IllegalArgumentException if no provider is registered under that name
     */
    public StreamingChatModel route(String providerName, ModelConfig config) {
        BaseProvider provider = providerMap.get(providerName);
        if (provider == null) {
            // Was a bare RuntimeException; IllegalArgumentException is the idiomatic
            // type for an invalid argument and remains catchable by existing callers.
            throw new IllegalArgumentException("Provider " + providerName + " not found");
        }
        return provider.build(config);
    }

    /**
     * Names of all providers that declare support for {@code taskType}.
     * NOTE(review): iteration order over the backing HashMap is unspecified, so the
     * "default" provider callers pick from position 0 is effectively arbitrary.
     */
    public List<String> getProvidersForTask(ModelStrategyEnum taskType) {
        return providerMap.values().stream()
                .filter(p -> p.support(taskType))
                .map(BaseProvider::providerName)
                .collect(Collectors.toList());
    }
}

View File

@@ -0,0 +1,44 @@
package com.xiang.xservice.ai.core.route;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.provider.BaseProvider;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import org.checkerframework.checker.units.qual.C;
import org.springframework.stereotype.Component;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * First-level router: resolves a task type to a default provider (when none is named)
 * and then delegates to {@link ModelRouter} for model construction.
 */
@Component
public class TaskRouter {

    /** Per-task list of provider names able to serve that task, resolved once at startup. */
    private final Map<ModelStrategyEnum, List<String>> taskDefaultProviderMap;
    private final ModelRouter providerRouter;

    public TaskRouter(ModelRouter providerRouter, List<BaseProvider> providers) {
        this.providerRouter = providerRouter;
        taskDefaultProviderMap = new HashMap<>();
        for (ModelStrategyEnum taskType : ModelStrategyEnum.values()) {
            // Default choice is the first provider supporting the task.
            List<String> supported = providerRouter.getProvidersForTask(taskType);
            taskDefaultProviderMap.put(taskType, supported);
        }
    }

    /**
     * First-level routing by task type, second-level by provider name. Falls back to
     * the first provider supporting the task when {@code providerName} is null/empty.
     *
     * @throws IllegalStateException if no provider supports {@code taskType}
     */
    public StreamingChatModel route(ModelStrategyEnum taskType, String providerName, ModelConfig config) {
        if (providerName == null || providerName.isEmpty()) {
            List<String> candidates = taskDefaultProviderMap.get(taskType);
            if (candidates == null || candidates.isEmpty()) {
                // The original called get(0) unguarded, surfacing an unexplained
                // IndexOutOfBoundsException; fail with a clear message instead.
                throw new IllegalStateException("No provider supports task type: " + taskType);
            }
            providerName = candidates.get(0);
        }
        // Second-level route by provider name.
        return providerRouter.route(providerName, config);
    }
}

View File

@@ -0,0 +1,12 @@
package com.xiang.xservice.ai.core.strategy;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
/**
 * Per-task-type model-creation strategy (CHAT / REASON / CODE implementations exist).
 */
public interface BaseStrategy {

    // Strategy key; matches a ModelStrategyEnum#getName() value.
    String modelType();

    // Builds a streaming chat model for the named provider with the given settings.
    StreamingChatModel createProvider(String provider, ModelConfig config);
}

View File

@@ -0,0 +1,27 @@
package com.xiang.xservice.ai.core.strategy;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.provider.BaseProvider;
import com.xiang.xservice.ai.core.route.TaskRouter;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;
/**
 * Strategy for plain conversational tasks; delegates to the TaskRouter under CHAT.
 */
@Component
@RequiredArgsConstructor
public class ChatStrategy implements BaseStrategy {

    private final TaskRouter taskRouter;

    /** Strategy key for conversational tasks. */
    @Override
    public String modelType() {
        return ModelStrategyEnum.CHAT.getName();
    }

    /** Builds a chat model via the two-level router, pinned to the CHAT task type. */
    @Override
    public StreamingChatModel createProvider(String provider, ModelConfig config) {
        StreamingChatModel model = taskRouter.route(ModelStrategyEnum.CHAT, provider, config);
        return model;
    }
}

View File

@@ -0,0 +1,20 @@
package com.xiang.xservice.ai.core.strategy;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import org.springframework.stereotype.Component;
/**
 * Strategy for code-generation tasks. Not implemented yet.
 */
@Component
public class CodeStrategy implements BaseStrategy {

    /** Strategy key for code-generation tasks. */
    @Override
    public String modelType() {
        return ModelStrategyEnum.CODE.getName();
    }

    /**
     * Not implemented. The original returned {@code null}, which would surface as an
     * NPE far from the cause; failing fast makes the missing implementation explicit.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public StreamingChatModel createProvider(String provider, ModelConfig config) {
        throw new UnsupportedOperationException("CODE strategy is not implemented yet");
    }
}

View File

@@ -0,0 +1,20 @@
package com.xiang.xservice.ai.core.strategy;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import org.springframework.stereotype.Component;
/**
 * Strategy for logical-reasoning tasks. Not implemented yet.
 */
@Component
public class ReasonStrategy implements BaseStrategy {

    /** Strategy key for reasoning tasks. */
    @Override
    public String modelType() {
        return ModelStrategyEnum.REASON.getName();
    }

    /**
     * Not implemented. The original returned {@code null}, which would surface as an
     * NPE far from the cause; failing fast makes the missing implementation explicit.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public StreamingChatModel createProvider(String provider, ModelConfig config) {
        throw new UnsupportedOperationException("REASON strategy is not implemented yet");
    }
}

View File

@@ -0,0 +1,16 @@
package com.xiang.xservice.ai.service;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelStrategyEnum;
import com.xiang.xservice.ai.core.route.TaskRouter;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.service.AiServices;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Empty placeholder service.
 * NOTE(review): presumably the future home of AiServices-based agents (this file
 * imports dev.langchain4j.service.AiServices but never uses it) — implement or remove.
 */
@Service
@RequiredArgsConstructor
public class AgentService {
}

View File

@@ -0,0 +1,19 @@
package com.xiang.xservice.ai.service;
import com.xiang.xservice.ai.core.entity.ModelConfig;
import com.xiang.xservice.ai.core.enums.ModelTypeEnum;
import com.xiang.xservice.ai.core.route.ModelRouter;
import dev.langchain4j.model.chat.ChatModel;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
/**
 * Empty placeholder service.
 * NOTE(review): contains only a commented-out draft of ModelRouter-based model
 * creation; delete this class or finish the implementation.
 */
@Service
@RequiredArgsConstructor
public class AiService {
    // Draft kept from the original commit:
    // private final ModelRouter modelRouter;
    //
    // private ChatModel createChat(ModelTypeEnum modelType, ModelConfig modelConfig) {
    // return modelRouter.create(modelType.getModelType(), modelConfig);
    // }
}
}