目录
环境
大语言模型: llama-3.1-70b-versatile
Mem0: Empower your AI applications with long-term memory and personalization
Mem0 API Key: 需要在 https://app.mem0.ai/dashboard/api-keys 注册并获取
GROQ_API_KEY: 需要在 https://console.groq.com/keys 注册并获取
代码
import os

from dotenv import load_dotenv
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
from openai import OpenAI
from mem0 import Memory, MemoryClient

# Load variables from .env BEFORE touching os.environ: load_dotenv() does NOT
# override keys that already exist, so calling it after the assignments below
# would silently ignore the .env file.
load_dotenv()

# SECURITY NOTE(review): hard-coded placeholder keys — real keys belong in
# .env or the shell environment. setdefault keeps any value supplied by
# .env / the environment instead of clobbering it with the placeholder.
os.environ.setdefault('OPENAI_API_KEY', 'sk-xxx')
os.environ.setdefault("GROQ_API_KEY", "gsk_xxxxxx")
class PersonalTravelAssistant:
    """Chat assistant that augments a Groq-hosted LLM with mem0 long-term memory.

    Memories are stored and retrieved per ``user_id`` via the mem0 hosted API.
    """

    def __init__(self):
        # temperature=0 for deterministic, repeatable answers.
        self.client = ChatGroq(temperature=0, model="llama-3.1-70b-versatile")
        # SECURITY NOTE(review): hard-coded placeholder key — prefer reading the
        # mem0 key from the environment, like the other keys in this script.
        self.memory = MemoryClient(api_key="m0-xxxx")
        self.messages = [{"role": "system", "content": "You are a personal AI Assistant."}]

    def ask_question(self, question, user_id):
        """Answer ``question`` for ``user_id``, using and updating stored memories.

        Returns the raw chat-model response object from the LangChain chain.
        """
        # Fetch previously stored related memories to give the model context.
        previous_memories = self.search_memories(question, user_id=user_id)
        prompt = question
        if previous_memories:
            prompt = f"User input: {question}\n Previous memories: {previous_memories}"
        system = "You are a personal AI Assistant.请用中文回答"
        # Pass the user text through an {input} placeholder rather than
        # embedding it in the template string itself: literal braces in user
        # input would otherwise be parsed as (undefined) template variables
        # and raise at invoke time.
        template = ChatPromptTemplate.from_messages([("system", system), ("human", "{input}")])
        chain = template | self.client
        answer = chain.invoke({"input": prompt})
        # Store the question so future sessions can recall it.
        self.memory.add(question, user_id=user_id)
        return answer

    def get_memories(self, user_id):
        """Return every stored memory string for ``user_id``."""
        memories = self.memory.get_all(user_id=user_id)
        return [m['memory'] for m in memories]

    def search_memories(self, query, user_id):
        """Return memory strings for ``user_id`` relevant to ``query``."""
        memories = self.memory.search(query, user_id=user_id)
        return [m['memory'] for m in memories]
# Usage example
# Single demo user id: all memories are stored and retrieved under this key.
user_id = "traveler_123"
# NOTE(review): constructed at import time, so the Groq/mem0 clients are set
# up as soon as the module loads — confirm this is intended rather than
# building the assistant inside main().
ai_assistant = PersonalTravelAssistant()
def main():
    """Interactive loop: answer questions until the user types 'q' or 'exit'.

    After each answer, prints every memory currently stored for the demo user.
    """
    while True:
        try:
            question = input("Question: ")
        except (EOFError, KeyboardInterrupt):
            # Exit cleanly on Ctrl-D / Ctrl-C instead of dumping a traceback.
            print("\nExiting...")
            break
        if question.lower() in ['q', 'exit']:
            print("Exiting...")
            break
        answer = ai_assistant.ask_question(question, user_id=user_id)
        print(f"Answer: {answer}")
        memories = ai_assistant.get_memories(user_id=user_id)
        print("Memories:")
        for memory in memories:
            print(f"- {memory}")
        print("-----")


if __name__ == "__main__":
    main()