deepseek-sb20260408
1、pom
<properties>
<java.version>17</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<spring-boot.version>3.4.5</spring-boot.version> <!-- Spring AI 1.0.0 GA 基于 Spring Boot 3.4.x 构建,3.2.0 会产生依赖冲突 -->
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-deepseek</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${spring-boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
2、yml
server:
port: 8081
spring:
  ai:
    deepseek: ## 这一行是你选择的LLM模型,如果是openai,这里就填openai, base-url就是填对应厂商的地址
      api-key: "sk-199324596dbb4308afcb77d46GGGGGGG"
      base-url: "https://api.deepseek.com"
      chat:
        options:
          model: deepseek-chat
      embedding:
        enabled: false
注:api-key 请用自己的key
3、controller
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
@RequestMapping("/openai")
@RestController
public class DeepSeekChatModelController {

    private final ChatModel deepSeekChatModel;

    /**
     * Spring Boot auto-configures a {@link ChatModel} bean from the
     * {@code spring.ai.deepseek.*} properties in application.yml; it is
     * injected here and can be used directly.
     */
    public DeepSeekChatModelController(ChatModel chatModel) {
        this.deepSeekChatModel = chatModel;
    }

    /**
     * Blocking call: waits for the complete LLM answer before replying to
     * the client.
     *
     * @param prompt user prompt taken from the URL path
     * @return the full model response text
     */
    @GetMapping("/simple/chat/{prompt}")
    public String simpleChat(@PathVariable(value = "prompt") String prompt) {
        return deepSeekChatModel.call(new Prompt(prompt))
                .getResult().getOutput().getText();
    }

    /**
     * Streaming call: partial results are pushed to the client as the model
     * produces them (SSE-style "typewriter" effect), instead of waiting for
     * the whole answer.
     *
     * @param prompt   user prompt taken from the URL path
     * @param response used to force UTF-8 so non-ASCII (e.g. Chinese) output
     *                 is not garbled in the browser
     * @return a {@link Flux} emitting the response text chunk by chunk
     */
    @GetMapping("/stream/chat/{prompt}")
    public Flux<String> streamChat(
            @PathVariable(value = "prompt") String prompt,
            HttpServletResponse response) {
        response.setCharacterEncoding("UTF-8");
        Flux<ChatResponse> stream = deepSeekChatModel.stream(new Prompt(prompt));
        return stream.map(resp -> resp.getResult().getOutput().getText());
    }
}
4、浏览器
http://localhost:8081/openai/stream/chat/%E6%9E%97%E8%8A%9D%E5%A4%A9%E6%B0%94

http://localhost:8081/openai/simple/chat/%E6%9E%97%E8%8A%9D%E5%A4%A9%E6%B0%94

浙公网安备 33010602011771号