Run the model from the command line:

```shell
ollama run xino/deepquery-1.5b-rl
```
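If you prefer to download the weights ahead of time instead of letting `ollama run` fetch them on first use, the model can be pulled explicitly (the tag must match the one used above):

```shell
# Pre-download the model so the first run starts without a download wait
ollama pull xino/deepquery-1.5b-rl
```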
Or call the local REST API directly:

```shell
curl http://localhost:11434/api/chat \
  -d '{
    "model": "xino/deepquery-1.5b-rl",
    "messages": [{"role": "user", "content": "Hello!"}]
  }'
```
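By default the `/api/chat` endpoint streams the reply as a series of newline-delimited JSON chunks. A minimal sketch of requesting a single, complete response instead, assuming the standard Ollama API `stream` option:

```shell
curl http://localhost:11434/api/chat \
  -d '{
    "model": "xino/deepquery-1.5b-rl",
    "messages": [{"role": "user", "content": "Hello!"}],
    "stream": false
  }'
```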
Using the Python library:

```python
from ollama import chat

response = chat(
    model='xino/deepquery-1.5b-rl',
    messages=[{'role': 'user', 'content': 'Hello!'}],
)
print(response.message.content)
```
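To print tokens as the model generates them rather than waiting for the full reply, the Python client can stream. A sketch assuming the `stream=True` option of the `ollama` package, which turns the return value into an iterator of partial responses:

```python
from ollama import chat

# stream=True yields response chunks as they are generated
stream = chat(
    model='xino/deepquery-1.5b-rl',
    messages=[{'role': 'user', 'content': 'Hello!'}],
    stream=True,
)
for chunk in stream:
    print(chunk.message.content, end='', flush=True)
```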
Using the JavaScript library:

```javascript
import ollama from 'ollama'

const response = await ollama.chat({
  model: 'xino/deepquery-1.5b-rl',
  messages: [{ role: 'user', content: 'Hello!' }],
})
console.log(response.message.content)
```
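The JavaScript client supports streaming as well. A sketch assuming the `stream: true` option of the `ollama` npm package, which makes `chat()` resolve to an async iterable of partial responses:

```javascript
import ollama from 'ollama'

// stream: true makes chat() return an async iterable of partial responses
const response = await ollama.chat({
  model: 'xino/deepquery-1.5b-rl',
  messages: [{ role: 'user', content: 'Hello!' }],
  stream: true,
})
for await (const part of response) {
  process.stdout.write(part.message.content)
}
```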