pom.xml file
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.2.4</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.bjpowernode</groupId>
    <artifactId>spring-ai-ollama</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>spring-ai-ollama</name>
    <description>spring-ai-ollama</description>
    <properties>
        <java.version>17</java.version>
        <spring-ai.version>1.0.0-SNAPSHOT</spring-ai.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-devtools</artifactId>
            <scope>runtime</scope>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.ai</groupId>
                <artifactId>spring-ai-bom</artifactId>
                <version>${spring-ai.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <excludes>
                        <exclude>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok</artifactId>
                        </exclude>
                    </excludes>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <!-- Extra repository for this project: Maven Central does not yet host the Spring AI snapshot jars -->
    <repositories>
        <!-- Snapshot repository -->
        <repository>
            <id>spring-snapshot</id>
            <name>Spring Snapshots</name>
            <url>https://repo.spring.io/snapshot</url>
            <releases>
                <enabled>false</enabled>
            </releases>
        </repository>
    </repositories>
</project>
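The project also needs an ordinary Spring Boot entry point, which is not shown in the original post. A minimal sketch (the class name and base package are assumptions chosen to match the groupId above, so component scanning finds the controller package):

package com.bjpowernode;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Standard Spring Boot entry point; controllers under com.bjpowernode.*
// are picked up automatically by component scanning.
@SpringBootApplication
public class SpringAiOllamaApplication {

    public static void main(String[] args) {
        SpringApplication.run(SpringAiOllamaApplication.class, args);
    }
}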
Configuration file: application.yml
spring:
  application:
    name: spring-ai-ollama
  ai:
    ollama:
      base-url: http://localhost:11434
      chat:
        options:
          model: qwen2.5-coder:7b
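With these properties in place, the Ollama starter auto-configures an OllamaChatModel bean that talks to http://localhost:11434 and defaults to qwen2.5-coder:7b. Before adding any controllers you can verify the connection with a small startup check; this is a sketch, not part of the original project (class name and prompt text are assumptions):

package com.bjpowernode.config;

import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Optional smoke test: sends one prompt to the auto-configured model at startup,
// so a wrong base-url or a model that has not been pulled fails fast.
@Configuration
public class OllamaSmokeTest {

    @Bean
    CommandLineRunner ollamaSmokeTestRunner(OllamaChatModel chatModel) {
        return args -> System.out.println("Ollama replied: " + chatModel.call("Say hello in one word"));
    }
}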
Demo
package com.bjpowernode.controller;

import jakarta.annotation.Resource;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class OllamaController {

    @Resource
    private OllamaChatModel ollamaChatClient;

    // Same Ollama model, injected through the generic ChatModel interface (not used in this demo)
    @Resource
    private ChatModel chatClient;

    @RequestMapping(value = "/ai/ollama")
    public Object ollama(@RequestParam(value = "msg") String msg) {
        String called = ollamaChatClient.call(msg);
        System.out.println(called);
        return called;
    }

    @RequestMapping(value = "/ai/ollama2")
    public Object ollama2(@RequestParam(value = "msg") String msg) {
        ChatResponse chatResponse = ollamaChatClient.call(new Prompt(msg, OllamaOptions.builder()
                .model("qwen2.5-coder:7b") // which model to use
                .temperature(0.4)          // temperature: higher values make the output more random, lower values more deterministic
                .build()));
        System.out.println(chatResponse.getResult().getOutput().getText());
        return chatResponse.getResult().getOutput().getText();
    }
}
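For longer answers the same model can also be consumed as a stream instead of waiting for the complete response. The following is a hedged sketch of an additional controller, not part of the original demo (class name and endpoint path are assumptions; it relies on the Reactor types that the Spring AI starter already brings in):

package com.bjpowernode.controller;

import jakarta.annotation.Resource;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

// Hypothetical streaming variant: pushes each generated chunk to the client
// as a server-sent event instead of returning one big string.
@RestController
public class OllamaStreamController {

    @Resource
    private OllamaChatModel ollamaChatModel;

    @RequestMapping(value = "/ai/ollama/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<String> stream(@RequestParam(value = "msg") String msg) {
        return ollamaChatModel.stream(new Prompt(msg))
                .map(response -> response.getResult().getOutput().getText());
    }
}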
Run the project and test the demo
Here, the model I have deployed with Ollama is qwen2.5-coder:7b.
You can change the model name in the code and configuration to match whichever model you have deployed, and call the endpoints the same way.
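With the application running, the two endpoints can be called straight from a browser or any HTTP client, for example (assuming the default server port 8080, since application.yml does not set server.port):

http://localhost:8080/ai/ollama?msg=hello
http://localhost:8080/ai/ollama2?msg=write a bubble sort in Java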
Previous article: Deploying the Qwen large model on Windows