1、配置适配jdk8的依赖
<dependency>
    <groupId>io.github.lnyo-cly</groupId>
    <artifactId>ai4j-spring-boot-stater</artifactId>
    <version>0.7.0</version>
</dependency>
2、配置bootstrap.yml
ai:
  ollama:
    api-host: http://localhost:11434
3、编写接口
package com.ronshi.ai;

import io.github.lnyocly.ai4j.listener.SseListener;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatCompletion;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatCompletionResponse;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatMessage;
import io.github.lnyocly.ai4j.service.IChatService;
import io.github.lnyocly.ai4j.service.PlatformType;
import io.github.lnyocly.ai4j.service.factor.AiService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import javax.servlet.http.HttpServletResponse;
import java.io.PrintWriter;

/**
 * REST endpoints that proxy chat requests to a local Ollama instance
 * through the ai4j client library.
 *
 * @author ronshi
 * @date 2025/2/17 16:31
 */
@RestController
public class AiController {

    /** Model name served by the local Ollama instance; used by both endpoints. */
    private static final String MODEL = "deepseek-r1:1.5b";

    private final AiService aiService;

    // Constructor injection instead of field injection: the dependency is
    // explicit, the field can be final, and the class is testable without Spring.
    @Autowired
    public AiController(AiService aiService) {
        this.aiService = aiService;
    }

    /**
     * Blocking chat endpoint: sends the question and returns the complete answer.
     *
     * @param question the user question, passed as a query parameter
     * @return the model's full answer text
     * @throws Exception if the underlying ai4j call fails
     */
    @GetMapping("/chat")
    public String getChatMessage(@RequestParam String question) throws Exception {
        // Obtain the Ollama-backed chat service.
        IChatService chatService = aiService.getChatService(PlatformType.OLLAMA);

        // Build the request payload.
        ChatCompletion chatCompletion = ChatCompletion.builder()
                .model(MODEL)
                .message(ChatMessage.withUser(question))
                .build();
        System.out.println(chatCompletion);

        // Send the chat request.
        ChatCompletionResponse chatCompletionResponse = chatService.chatCompletion(chatCompletion);

        // Extract the answer text and report the token usage.
        String content = chatCompletionResponse.getChoices().get(0).getMessage().getContent();
        long totalTokens = chatCompletionResponse.getUsage().getTotalTokens();
        System.out.println("总token消耗: " + totalTokens);
        return content;
    }

    /**
     * Streaming chat endpoint: writes answer fragments to the response as they
     * arrive from the model.
     *
     * @param question the user question, passed as a query parameter
     * @param response servlet response the fragments are written to
     * @throws Exception if the underlying ai4j call fails
     */
    @GetMapping("/chatStream")
    public void getChatMessageStream(@RequestParam String question,
                                     HttpServletResponse response) throws Exception {
        // Avoid garbled Chinese output: declare the charset both on the writer
        // AND in the Content-Type header, otherwise clients may decode the
        // stream with their default charset. Must happen before getWriter().
        response.setCharacterEncoding("UTF-8");
        response.setContentType("text/plain;charset=UTF-8");

        // Obtain the Ollama-backed chat service.
        IChatService chatService = aiService.getChatService(PlatformType.OLLAMA);

        // Build the request payload.
        ChatCompletion chatCompletion = ChatCompletion.builder()
                .model(MODEL)
                .message(ChatMessage.withUser(question))
                .build();

        PrintWriter writer = response.getWriter();

        // Push each fragment to the client as soon as it arrives.
        SseListener sseListener = new SseListener() {
            @Override
            protected void send() {
                writer.write(this.getCurrStr());
                writer.flush();
                System.out.println(this.getCurrStr());
            }
        };
        chatService.chatCompletionStream(chatCompletion, sseListener);
        writer.close();
        System.out.println(sseListener.getOutput());
    }
}
4、普通输出
http://localhost:8080/chat?question=地球的组成
5、流式输出
<!DOCTYPE html>
<html lang="zh">
<head>
    <meta charset="UTF-8">
    <title>人工智能</title>
</head>
<body>
<input id="question" type="text" placeholder="输入需要提问的问题"/>
<button id="startButton">开始</button>
<div id="output"></div>
<script>
    const input = document.getElementById("question");
    const outputDiv = document.getElementById('output');
    const startButton = document.getElementById('startButton');

    // Fetch the streaming endpoint and append each decoded fragment to the page.
    async function getResponse() {
        const question = input.value;
        // encodeURIComponent: without it a question containing '&', '#', '+'
        // or '=' corrupts the query string.
        const resp = await fetch("http://localhost:8080/chatStream?question=" + encodeURIComponent(question), {
            method: 'GET'
        });
        if (!resp.ok) {
            // Surface HTTP errors instead of silently reading an empty body.
            console.error("chatStream request failed: " + resp.status);
            return;
        }
        const reader = resp.body.getReader();
        const textDecoder = new TextDecoder();
        while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            // {stream: true}: a multi-byte UTF-8 character (e.g. Chinese) can be
            // split across two chunks; streaming mode buffers the partial bytes
            // instead of emitting replacement characters.
            const str = textDecoder.decode(value, { stream: true });
            outputDiv.innerText += str;
            console.log(str);
        }
    }

    startButton.addEventListener("click", getResponse);
</script>
</body>
</html>