Run the model from the command line (the first invocation downloads the weights, then opens an interactive chat session):

ollama run lucas2024/vecteus-v1:q5_k_m
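
You can also call the local Ollama server directly over its REST API; it listens on http://localhost:11434 by default: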
curl http://localhost:11434/api/chat \
  -d '{
    "model": "lucas2024/vecteus-v1:q5_k_m",
    "messages": [{"role": "user", "content": "Hello!"}]
  }'
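
By default /api/chat streams the reply back as a sequence of JSON objects; add "stream": false to the request body to receive a single complete response instead.

The same request from Python, using the official ollama package (pip install ollama):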
from ollama import chat

# Send a single-turn chat request and print the assistant's reply
response = chat(
    model='lucas2024/vecteus-v1:q5_k_m',
    messages=[{'role': 'user', 'content': 'Hello!'}],
)
print(response.message.content)
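
For longer generations you can stream tokens as they arrive. A minimal sketch with the same library and the model name used above; passing stream=True makes chat() return an iterator of partial messages:

from ollama import chat

# Ask for a streamed response; chat() now yields partial messages
stream = chat(
    model='lucas2024/vecteus-v1:q5_k_m',
    messages=[{'role': 'user', 'content': 'Hello!'}],
    stream=True,
)

# Print each fragment as soon as it arrives
for chunk in stream:
    print(chunk['message']['content'], end='', flush=True)

The equivalent request with the official JavaScript library (npm install ollama):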
import ollama from 'ollama'

// Send a single-turn chat request and print the assistant's reply
const response = await ollama.chat({
  model: 'lucas2024/vecteus-v1:q5_k_m',
  messages: [{ role: 'user', content: 'Hello!' }],
})
console.log(response.message.content)
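
The JavaScript library can stream as well: pass stream: true to ollama.chat() and consume the returned async iterable with for await, reading message.content from each partial response.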