Class: LLM::Clients::OpenAI

Inherits:
Object
Includes:
HTTParty
Defined in:
lib/llm/clients/open_ai.rb

Instance Method Summary

Constructor Details

#initialize(llm:) ⇒ OpenAI

Returns a new instance of OpenAI.



# File 'lib/llm/clients/open_ai.rb', line 10

def initialize(llm:)
  @llm = llm
end
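
A minimal construction sketch, not part of the generated documentation: the only thing #chat reads from the injected llm is #canonical_name, so any object exposing that method will do. The Struct below is purely illustrative and hypothetical.

# Hypothetical example: any object responding to #canonical_name works here.
llm = Struct.new(:canonical_name).new("gpt-4o-mini")
client = LLM::Clients::OpenAI.new(llm: llm)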

Instance Method Details

#chat(messages, options = {}) ⇒ Object



# File 'lib/llm/clients/open_ai.rb', line 14

def chat(messages, options = {})
  parameters = {
    model: @llm.canonical_name,
    messages: messages,
    temperature: options[:temperature],
    response_format: options[:response_format],
    max_tokens: options[:max_output_tokens],
    top_p: options[:top_p],
    stop: options[:stop_sequences],
    presence_penalty: options[:presence_penalty],
    frequency_penalty: options[:frequency_penalty],
    tools: options[:tools],
    tool_choice: options[:tool_choice]
  }.compact

  return chat_streaming(parameters, options[:on_message], options[:on_complete]) if options[:stream]

  resp = post_url("/chat/completions", body: parameters.to_json)

  Response.new(resp).to_normalized_response
end
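
Option keys are mapped to the Chat Completions request body (for example :max_output_tokens becomes max_tokens and :stop_sequences becomes stop), and nil entries are dropped by .compact before the POST to /chat/completions. The sketch below shows both call styles; the model name, message content, and the arguments yielded to the streaming callbacks are assumptions, since chat_streaming and the normalized response format are not shown in this listing.

messages = [{ role: "user", content: "Summarize this changelog." }]

# Blocking request: returns Response.new(resp).to_normalized_response.
response = client.chat(messages, temperature: 0.2, max_output_tokens: 256)

# Streaming request: when options[:stream] is truthy, the call is delegated to
# chat_streaming with options[:on_message] and options[:on_complete] as callbacks.
# The callback argument shapes shown here are assumptions.
client.chat(
  messages,
  stream: true,
  on_message: ->(chunk) { print chunk },
  on_complete: ->(final) { puts }
)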