Installing Ollama and AnythingLLM on Windows
Download the installer package
Download the package ollama&AnythingLLM.zip linked above and extract it as shown below.
Install Ollama
Double-click OllamaSetup.exe to run the installer. Once it finishes, verify the installation from the command line:
ollama --version
If a version number is printed, the installation succeeded.
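On a successful install the command prints the installed version, something like the following (the exact number will differ by release):
ollama version is 0.1.32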
Run a model
Several commonly used models are available; we pick the popular llama2:
ollama run llama2
Once the model finishes downloading, you can ask questions directly at the interactive prompt.
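For example, type a question at the >>> prompt that appears; /bye ends the session:
>>> Why is the sky blue?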
Check the port for external access
By default the Ollama API listens on 127.0.0.1:11434. The help text of the serve command documents the OLLAMA_HOST variable that controls the bind address:
ollama serve --help
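To let other machines reach the API, one approach (an assumption about your setup; adjust the address to your needs, and quit the Ollama tray app first if it is already running) is to bind all interfaces before starting the server:
set OLLAMA_HOST=0.0.0.0:11434
ollama serve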
Install AnythingLLM
Run AnythingLLMDesktop.exe.
The application opens as shown below.
Create a new workspace
Configure the model
Start chatting
Custom document library
Create a txt file with content along the following lines.
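The original file's content appears only in a screenshot, so here is an invented stand-in; any short file containing a fact the base model cannot know will do:
The internal codename of our 2024 billing platform rewrite is "Bluebird", and it is led by the infrastructure team.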
Upload the document.
Spring AI
Dependencies
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.2.5</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.cyz</groupId>
    <artifactId>springai</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>springai</name>
    <description>springai</description>
    <properties>
        <java.version>17</java.version>
        <spring-ai.version>0.8.1</spring-ai.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.ai</groupId>
                <artifactId>spring-ai-bom</artifactId>
                <version>${spring-ai.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
    <repositories>
        <repository>
            <id>spring-milestones</id>
            <name>Spring Milestones</name>
            <url>https://repo.spring.io/milestone</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>
    </repositories>
</project>
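The spring-milestones repository is declared because pre-1.0 Spring AI builds such as 0.8.1 were distributed through Spring's milestone repository rather than Maven Central.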
Configuration
server.port=8080
spring.application.name=springai
# Ollama server address
spring.ai.ollama.base-url=http://127.0.0.1:11434
# The model must be specified; llama2 is used here
spring.ai.ollama.chat.model=llama2
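Before starting the application you can confirm that the base URL is reachable; the Ollama API's /api/tags endpoint lists the locally installed models:
curl http://127.0.0.1:11434/api/tags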
Startup class
package com.cyz.springai;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class SpringaiApplication {

    public static void main(String[] args) {
        SpringApplication.run(SpringaiApplication.class, args);
    }
}
Test controller
package com.cyz.springai;

import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

/**
 * @author cyz
 * @since 2024/9/11 12:00 PM
 */
@RestController
public class OllamaController {

    @Autowired
    OllamaChatClient ollamaChatClient;

    /**
     * Simple Q&A: returns the complete answer in a single response.
     */
    @RequestMapping("/chat")
    public String chat(String prompt) {
        return ollamaChatClient.call(prompt);
    }

    /**
     * Streaming Q&A: like a chat window, the answer is returned incrementally.
     */
    @RequestMapping("/chat_stream")
    public Flux<String> chat_stream(String prompt) {
        return ollamaChatClient.stream(prompt);
    }
}
Test
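With the application running, the two endpoints can be exercised from a terminal (the prompt text here is just an example):
curl "http://localhost:8080/chat?prompt=hello"
curl "http://localhost:8080/chat_stream?prompt=tell+me+a+story"
The /chat endpoint returns the full answer in one response, while /chat_stream emits the answer piece by piece as the model generates it.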