【LangChain4J】Streaming Output (Low-Level and High-Level API Modes)

Response streaming: see the official documentation.

Low-Level API

// Register a StreamingChatModel bean (declared inside a @Configuration class)
@Bean
public StreamingChatModel streamingChatModel() {
    return OpenAiStreamingChatModel.builder()
            .apiKey(System.getenv("DASHSCOPE_API_KEY"))                    // DashScope API key from the environment
            .modelName("qwen-plus")
            .baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")  // OpenAI-compatible endpoint
            .build();
}
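If you would rather not read the key from an environment variable, the same bean can take it from Spring configuration instead. This is only a minimal variant sketch: the property name dashscope.api-key is an illustrative choice, not a LangChain4J or DashScope convention, and @Value comes from org.springframework.beans.factory.annotation.

// Variant of the bean above, reading the key from application.yml/properties
// (the "dashscope.api-key" property name is hypothetical).
@Bean
public StreamingChatModel streamingChatModel(@Value("${dashscope.api-key}") String apiKey) {
    return OpenAiStreamingChatModel.builder()
            .apiKey(apiKey)
            .modelName("qwen-plus")
            .baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
            .build();
}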

@Autowired
private StreamingChatModel streamingChatModel;

// Stream the response by implementing the StreamingChatResponseHandler interface
@GetMapping("/stream/chat1")
public Flux<String> streamChat1(@RequestParam(value = "prompt", defaultValue = "杭州有什么好吃的") String prompt) {
    return Flux.create(emitter -> {
        streamingChatModel.chat(prompt, new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String partialResponse) {
                emitter.next(partialResponse);   // push each token/chunk to the subscriber
            }

            @Override
            public void onCompleteResponse(ChatResponse chatResponse) {
                emitter.complete();              // close the Flux once the model has finished
            }

            @Override
            public void onError(Throwable throwable) {
                emitter.error(throwable);        // propagate failures downstream
            }
        });
    });
}
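The same low-level handler pattern can also be exercised outside of Spring. The sketch below is a minimal, self-contained example that reuses the builder settings shown above and bridges the callbacks to a CompletableFuture so the main thread can wait for the stream to finish; the import paths assume a recent LangChain4J 1.x release and may differ slightly in older versions.

import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;

import java.util.concurrent.CompletableFuture;

public class LowLevelStreamingDemo {

    public static void main(String[] args) {
        // Same configuration as the @Bean above
        StreamingChatModel model = OpenAiStreamingChatModel.builder()
                .apiKey(System.getenv("DASHSCOPE_API_KEY"))
                .modelName("qwen-plus")
                .baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
                .build();

        // Bridge the callbacks to a CompletableFuture so main() can block until streaming ends
        CompletableFuture<ChatResponse> done = new CompletableFuture<>();
        model.chat("杭州有什么好吃的", new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String token) {
                System.out.print(token);             // print each chunk as it arrives
            }

            @Override
            public void onCompleteResponse(ChatResponse response) {
                done.complete(response);             // the aggregated final response
            }

            @Override
            public void onError(Throwable error) {
                done.completeExceptionally(error);
            }
        });
        done.join();
    }
}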

High-Level API

// Step 1: define the AI service interface
// (returning Flux<String> generally requires the langchain4j-reactor module on the classpath)
public interface ChatAssistant {
    Flux<String> streamChat(String prompt);
}

// Step 2: register the AI service implementation (AiServices builds a proxy that implements the interface)
@Bean
public ChatAssistant highStreamAssistant(StreamingChatModel streamingChatModel) {
    return AiServices.create(ChatAssistant.class, streamingChatModel);
}

@Autowired
private ChatAssistant chatAssistant;

// Step 3: call the service from the controller
@GetMapping("/stream/chat2")
public Flux<String> streamChat2(@RequestParam(value = "prompt", defaultValue = "杭州有什么好吃的") String prompt) {
    return chatAssistant.streamChat(prompt);
}
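If the stream is consumed from a browser (for example via EventSource), it can help to advertise the response as Server-Sent Events. This is only a hedged variant of the controller method above, not something LangChain4J requires; the /stream/chat3 path is an illustrative name, and MediaType comes from org.springframework.http.

// Same call as /stream/chat2, but declared as text/event-stream so SSE clients
// receive each emitted chunk as a separate event.
@GetMapping(value = "/stream/chat3", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<String> streamChat3(@RequestParam(value = "prompt", defaultValue = "杭州有什么好吃的") String prompt) {
    return chatAssistant.streamChat(prompt);
}

A quick way to watch the tokens arrive is curl with buffering disabled, e.g. curl -N "http://localhost:8080/stream/chat3" (assuming the default port).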
