class Client
# Represents a connection to the Ollama service.
# Definitions:
# Default endpoint for a locally running Ollama server (port 11434).
ENDPOINT = Async::HTTP::Endpoint.parse("http://localhost:11434")
# The default endpoint to connect to.
# def generate(prompt, **options, &block)
#
# Generate a response from the given prompt.
#
# Signature:
# - @parameter prompt [String] The prompt to generate a response from.
#
# Implementation:
# Generate a completion for the given prompt via the Ollama API.
#
# @parameter prompt [String] The prompt to generate a response from.
# @parameter options [Hash] Extra request options; `:model` defaults to "llama3".
# @yields {|response| ...} The raw HTTP response, when a block is given.
# @returns [Generate] A resource wrapping the generated value and response headers.
def generate(prompt, **options, &block)
  # `**options` is a fresh hash, so mutating it here is local to this call.
  options[:model] ||= "llama3"
  options[:prompt] = prompt

  Generate.post(self.with(path: "/api/generate"), options) do |resource, response|
    # Give the caller a chance to observe the raw response before it is read.
    block&.call(response)

    Generate.new(resource, value: response.read, metadata: response.headers)
  end
end