Ollama is a tool for running large language models locally. It exposes an HTTP API (listening on port 11434 by default) through which applications can do text generation and chat against locally hosted models such as qwen:14b, and Spring AI ships a starter that wires this API into a Spring Boot application.
1. Add the Spring AI Ollama starter dependency to the Spring Boot project's pom.xml:
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
</dependency>
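The snippet above does not specify a version. A common approach (a sketch, not from the original article: it assumes dependency management through the Spring AI BOM, and the version shown is a placeholder that should match the Spring AI release actually in use) is to import the BOM in dependencyManagement; for milestone releases the Spring milestone repository may also need to be declared:

<dependencyManagement>
    <dependencies>
        <!-- Spring AI BOM; the version is a placeholder, adjust it to your release -->
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-bom</artifactId>
            <version>0.8.1</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>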
2. Configure the Ollama endpoint and model in the YAML configuration file (application.yml):
spring:
  ai:
    ollama:
      base-url: http://127.0.0.1:11434
      chat:
        model: qwen:14b
The configuration points Spring AI at the Ollama API address and port (11434 is Ollama's default) and sets qwen:14b as the default chat model.
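Note that the model has to be present locally before requests will succeed; if qwen:14b has not been downloaded yet, it usually needs to be pulled first with ollama pull qwen:14b.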
3. Create a controller and use OllamaChatClient for text generation and chat:
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class ChatClientController {

    @Autowired
    @Qualifier("ollamaChatClient")
    private OllamaChatClient ollamaChatClient;

    /**
     * Simplest form: pass the raw user message and return the generated text.
     */
    @GetMapping("/ollama/chat/v1")
    public String ollamaChat(@RequestParam String msg) {
        return this.ollamaChatClient.call(msg);
    }

    /**
     * Wrap the message in a Prompt object and return the full ChatResponse,
     * including generation metadata.
     */
    @GetMapping("/ollama/chat/v2")
    public Object ollamaChatV2(@RequestParam String msg) {
        Prompt prompt = new Prompt(msg);
        ChatResponse chatResponse = ollamaChatClient.call(prompt);
        return chatResponse;
    }

    /**
     * Override the model and generation options per request via OllamaOptions,
     * and return only the generated text.
     */
    @GetMapping("/ollama/chat/v3")
    public Object ollamaChatV3(@RequestParam String msg) {
        Prompt prompt = new Prompt(
                msg,
                OllamaOptions.create()
                        .withModel("qwen:14b")
                        .withTemperature(0.4F));
        ChatResponse chatResponse = ollamaChatClient.call(prompt);
        return chatResponse.getResult().getOutput().getContent();
    }
}
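For long generations it can be nicer to stream the answer instead of waiting for the full completion. The controller below is a sketch, not part of the original article: it assumes the same Spring AI version (where OllamaChatClient exposes stream(Prompt) returning a Reactor Flux) and that Reactor is on the classpath so Spring MVC can render the Flux as server-sent events; the /ollama/chat/stream path and the ChatStreamController name are made up for illustration.

import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

@RestController
public class ChatStreamController {

    @Autowired
    @Qualifier("ollamaChatClient")
    private OllamaChatClient ollamaChatClient;

    /**
     * Streams the answer chunk by chunk as server-sent events instead of
     * blocking until the whole completion is ready.
     */
    @GetMapping(value = "/ollama/chat/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<String> ollamaChatStream(@RequestParam String msg) {
        // stream(...) emits partial ChatResponse objects as tokens are generated
        return ollamaChatClient.stream(new Prompt(msg))
                .map(response -> response.getResult().getOutput().getContent());
    }
}

With the application and Ollama both running (and qwen:14b pulled), the original endpoints can also be exercised from a browser or curl, e.g. http://localhost:8080/ollama/chat/v1?msg=hello, assuming Spring Boot's default port 8080.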