This is a simple Elixir wrapper around the Ollama API.
First, initialize the client:
client = Ollama.init()
Ollama.generate(client, [
model: "llama3.2",
prompt: "Who is Luke Skywalker?"
])
Ollama.chat(client, [
model: "llama3.2",
messages: [
%{role: "system", content: "You are a helpful assistant."},
%{role: "user", content: "Who is Luke Skywalker?"}
]
])
This library is released under the MIT license.