# Pull (if needed) and start an interactive session with the model.
ollama run dreamingbumblebee/qwen2.5vl-3b-qlora-ko-1.5k-llm-q4_k_m
# Send a single chat request to the local Ollama REST API (default port 11434).
# The JSON body names the model and supplies one user message.
curl http://localhost:11434/api/chat \
-d '{
"model": "dreamingbumblebee/qwen2.5vl-3b-qlora-ko-1.5k-llm-q4_k_m",
"messages": [{"role": "user", "content": "Hello!"}]
}'
from ollama import chat

# Issue a single-turn chat request against the locally running Ollama
# server and print the assistant's reply text.
reply = chat(
    model='dreamingbumblebee/qwen2.5vl-3b-qlora-ko-1.5k-llm-q4_k_m',
    messages=[{'role': 'user', 'content': 'Hello!'}],
)
print(reply.message.content)
import ollama from 'ollama'

// Send one user message to the local Ollama server via the JS client
// and log the assistant's reply text.
const reply = await ollama.chat({
  model: 'dreamingbumblebee/qwen2.5vl-3b-qlora-ko-1.5k-llm-q4_k_m',
  messages: [{ role: 'user', content: 'Hello!' }],
});
console.log(reply.message.content);