用 litellm 打通了,有空再写教程。步骤:
pip install 'litellm[proxy]'
litellm --model ollama/qwen:0.5b
代理地址: http://127.0.0.1:4000/
然后用 OpenAI Python library 调用:
# NOTE: Ollama itself also works here — it exposes an OpenAI-compatible endpoint,
# so the official OpenAI client can talk to it directly.
from openai import OpenAI

# Point the OpenAI client at the local Ollama server.
client = OpenAI(
    base_url='http://localhost:11434/v1',
    api_key='ollama',  # required, but unused
)

# A short multi-turn conversation for the model to continue.
conversation = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Who won the world series in 2020?"},
    {"role": "assistant", "content": "The LA Dodgers won in 2020."},
    {"role": "user", "content": "Where was it played?"},
]

response = client.chat.completions.create(
    model="llama2",
    messages=conversation,
)

# Print the assistant's reply from the first (only) choice.
print(response.choices[0].message.content)
from pandasai import SmartD