Dict.CN 在线词典, 英语学习, 在线翻译 ------------- MyGitee My腾云code

Happy_EveryDay

可以平凡 不可以平庸 无爱则无忧,无欲则无求,无怒而无敌,无怨才是佛。所有烦恼,都是放不下的执著 开源技群 328035181 MyGitee

SBAI-MultiPlatformAndModel 20260424

1、pom

<properties>
<java.version>17</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<spring-boot.version>3.2.0</spring-boot.version>
<spring-ai.version>1.1.2</spring-ai.version>
<spring-ai-alibaba.version>1.1.2.2</spring-ai-alibaba.version>
<spring-ai-alibaba-extensions.version>1.1.2.2</spring-ai-alibaba-extensions.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!-- 阿里云通义千问(DashScope)starter -->
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-starter-dashscope</artifactId>
</dependency>
<!-- deepseek starter -->
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-deepseek</artifactId>
</dependency>
<!-- 新增:Ollama 的 Starter -->
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-ollama</artifactId>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${spring-boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<!-- 统一管理Spring AI依赖版本 -->
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-bom</artifactId>
<version>${spring-ai.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-bom</artifactId>
<version>${spring-ai-alibaba.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-extensions-bom</artifactId>
<version>${spring-ai-alibaba-extensions.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<!-- Spring AI 里程碑/快照仓库(必须配置,否则依赖无法下载) -->
<repositories>
<repository>
<id>spring-milestones</id>
<name>Spring Milestones</name>
<url>https://repo.spring.io/milestone</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>spring-snapshots</id>
<name>Spring Snapshots</name>
<url>https://repo.spring.io/snapshot</url>
<releases>
<enabled>false</enabled>
</releases>
</repository>
</repositories>



2、yml

server:
  port: 18083
spring:
  ai:
    dashscope:
      api-key: sk-8718a83408d7443b9544cdfbXXXXX
    deepseek: ## 这一行是你选择的LLM模型,如果是openai,这里就填openai, base-url就是填对应厂商的地址
      api-key: "sk-199324596dbb4308afcb77d4XXXXX"
      base-url: "https://api.deepseek.com"
      chat:
        options:
          model: deepseek-chat
      embedding:
        enabled: false
    ollama:
      base-url: http://192.168.91.164:11434
      chat:
        options:
          model: gemma3:4b
          #model: qwen2.5vl:3b moondream:latest qwen2.5vl:3b-q4_K_M moondream:v2
          #model: qwen2:0.5b
          #temperature: 0.1
          # 指定默认使用的模型,也可以在代码中动态覆盖
          #model: gemma3:4b
          #### 最小的轻量多模态模型(约 1.5GB)
          #model: minicpm-v:latest
  main: # 允许 Bean 覆盖
    allow-bean-definition-overriding: true

#### api-key 请填写自己的密钥(上文示例已脱敏)

3、tool
 
/**
 * Per-request options selecting which chat platform and model to call,
 * together with the sampling temperature.
 */
public class MultiPlatformAndModelOptions {

    /** Platform key: dashscope / deepseek / ollama. */
    private String platform;

    /** Model name, e.g. qwen-plus / deepseek-chat / llama3. */
    private String model;

    /** Sampling temperature controlling generation randomness. */
    private Double temperature;

    /** @return the platform key, or {@code null} if unset */
    public String getPlatform() { return platform; }

    /** @param platform platform key: dashscope / deepseek / ollama */
    public void setPlatform(String platform) { this.platform = platform; }

    /** @return the model name, or {@code null} if unset */
    public String getModel() { return model; }

    /** @param model model name understood by the chosen platform */
    public void setModel(String model) { this.model = model; }

    /** @return the sampling temperature, or {@code null} if unset */
    public Double getTemperature() { return temperature; }

    /** @param temperature sampling temperature (higher = more random) */
    public void setTemperature(Double temperature) { this.temperature = temperature; }

}


4、Controller
import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatModel;
import com.sb.multiplatform18083.tool.MultiPlatformAndModelOptions;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.prompt.ChatOptions;
import org.springframework.ai.deepseek.DeepSeekChatModel;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import java.util.HashMap;
import java.util.Map;

@RestController
public class MultiPlatformAndModelController {

    /** Registry of available chat models keyed by platform id (dashscope / deepseek / ollama). */
    private final Map<String, ChatModel> platforms = new HashMap<>();

    // Constructor injection of every ChatModel auto-configured by the starters.
    public MultiPlatformAndModelController(
            DashScopeChatModel dashScopeChatModel,
            DeepSeekChatModel deepSeekChatModel,
            OllamaChatModel ollamaChatModel) {
        platforms.put("dashscope", dashScopeChatModel);
        platforms.put("ollama", ollamaChatModel);
        platforms.put("deepseek", deepSeekChatModel);
    }

    /**
     * Streams a chat completion from the selected platform/model as a server-sent event stream.
     *
     * @param message     user prompt text
     * @param platform    platform key: dashscope / deepseek / ollama
     * @param model       model name understood by the chosen platform
     * @param temperature sampling temperature; defaults to 0.7
     * @param response    injected to force the SSE content type and disable caching
     * @return the streamed response tokens, or a single error message for an unknown platform
     */
    @GetMapping(value = "/chat", produces = "text/event-stream;charset=UTF-8")
    public Flux<String> chat(
            @RequestParam(value = "message") String message,
            @RequestParam(value = "platform") String platform,
            @RequestParam(value = "model") String model,
            // FIX: bind as Double directly. The previous Float binding widened via
            // Double.valueOf(float), turning a requested 0.7 into 0.699999988079071
            // before it reached the provider.
            @RequestParam(value = "temperature", defaultValue = "0.7") Double temperature,
            HttpServletResponse response) {
        // Force the response content type and charset for the event stream.
        response.setContentType("text/event-stream;charset=UTF-8");
        // Prevent browsers or intermediaries from buffering/caching the stream.
        response.setHeader("Cache-Control", "no-cache");

        // Wrap the request parameters manually (more robust than binding a POJO directly).
        MultiPlatformAndModelOptions options = new MultiPlatformAndModelOptions();
        options.setPlatform(platform);
        options.setModel(model);
        options.setTemperature(temperature);

        // 1. Resolve the ChatModel registered for the requested platform.
        ChatModel chatModel = platforms.get(options.getPlatform());
        if (chatModel == null) {
            return Flux.just("错误:不支持的平台:" + options.getPlatform());
        }

        // 2. Build a ChatClient carrying the per-request model options.
        ChatClient chatClient = ChatClient.builder(chatModel)
                .defaultOptions(ChatOptions.builder()
                        .temperature(options.getTemperature())
                        .model(options.getModel())
                        .build())
                .build();

        // 3. Launch the streaming chat request and return the token flux.
        return chatClient.prompt()
                .user(message)
                .stream()
                .content();
    }

}

 
5、浏览器
http://localhost:18083/chat?message=%E6%9E%97%E8%8A%9D%E4%B8%80%E4%B8%AD&platform=dashscope&model=qwen-plus&temperature=0.8

image

 

http://localhost:18083/chat?message=%E6%9E%97%E8%8A%9D%E4%B8%80%E4%B8%AD&platform=deepseek&model=deepseek-chat&temperature=0.8

image

 

posted on 2026-04-24 18:08  cn2025  阅读(12)  评论(0)    收藏  举报

导航