Skip to content
章节导航

SpringAI Alibaba 双模型一字一句的回应

SimpleAiController 类添加流式聊天接口

java
import jakarta.servlet.http.HttpServletResponse;
import lombok.RequiredArgsConstructor;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

/**
 * 简单的 AI 控制器
 *
 * @author 朔风
 * @date 2026-03-31 16:04
 */
/**
 * Simple AI controller exposing synchronous and streaming chat endpoints,
 * backed by two models: DashScope (Alibaba Cloud) and a local Ollama instance.
 *
 * @author 朔风
 * @date 2026-03-31 16:04
 */
@RestController
@RequestMapping("/api/ai")
@RequiredArgsConstructor
public class SimpleAiController {

    /** Client for the Alibaba Cloud DashScope model. */
    private final ChatClient dashscopeChatClient;

    /** Client for the locally hosted Ollama model. */
    private final ChatClient ollamaChatClient;

    /**
     * Synchronous chat against the DashScope model.
     *
     * @param question user question to send to the model
     * @return the complete model answer as a single string
     */
    @GetMapping(value = "/simple/chat", produces = "text/html;charset=utf-8")
    public String simpleChat(@RequestParam("question") String question) {
        return dashscopeChatClient.prompt(question).call().content();
    }

    /**
     * Synchronous chat against the local Ollama model.
     *
     * @param question user question to send to the model
     * @return the complete model answer as a single string
     */
    @GetMapping(value = "/ollamaSimpleChat/chat", produces = "text/html;charset=utf-8")
    public String ollamaSimpleChat(@RequestParam("question") String question) {
        return ollamaChatClient.prompt(question).call().content();
    }

    /**
     * Streaming chat against the DashScope model; answer fragments are
     * emitted as they are produced so the client sees the reply appear
     * piece by piece.
     *
     * @param question user question to send to the model
     * @return a reactive stream of answer fragments
     */
    @GetMapping(value = "/stream/chat", produces = "text/html;charset=utf-8")
    public Flux<String> streamChat(@RequestParam("question") String question) {
        return dashscopeChatClient.prompt(question).stream().content();
    }

    /**
     * Streaming chat against the local Ollama model.
     *
     * @param question user question to send to the model
     * @return a reactive stream of answer fragments
     */
    @GetMapping(value = "/ollamaStreamChat/chat", produces = "text/html;charset=utf-8")
    public Flux<String> ollamaStreamChat(@RequestParam("question") String question) {
        return ollamaChatClient.prompt(question).stream().content();
    }

}

访问

访问阿里云模型

shell
http://localhost:8080/api/ai/stream/chat?question=你是谁

访问本地模型

shell
http://localhost:8080/api/ai/ollamaStreamChat/chat?question=你是谁