Method: Langchain::LLM::OpenAI#chat

Defined in:
lib/langchain/llm/openai.rb

#chat(params = {}, &block) ⇒ Object

Generate a chat completion for given messages.

Parameters:

  • params (Hash) (defaults to: {})

    unified chat parameters from [Langchain::LLM::Parameters::Chat::SCHEMA]

Options Hash (params):

  • :messages (Array<Hash>)

    List of messages comprising the conversation so far

  • :model (String)

    ID of the model to use

Raises:

  • (ArgumentError)
[View source]

120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
# File 'lib/langchain/llm/openai.rb', line 120

# Generate a chat completion for the given messages.
#
# @param params [Hash] unified chat parameters (see Langchain::LLM::Parameters::Chat::SCHEMA)
# @option params [Array<Hash>] :messages list of messages comprising the conversation so far
# @option params [String] :model ID of the model to use
# @yield [Hash] when a block is given, yields each streamed chunk's first "choices" entry
# @return [Langchain::LLM::OpenAIResponse] the wrapped API response (assembled from chunks when streaming)
# @raise [ArgumentError] if :messages or :model is missing, or :tool_choice is given without :tools
def chat(params = {}, &block)
  parameters = chat_parameters.to_params(params)

  # Validate required arguments up front so we fail before making an API call.
  raise ArgumentError, "messages argument is required" if Array(parameters[:messages]).empty?
  raise ArgumentError, "model argument is required" if parameters[:model].to_s.empty?
  if parameters[:tool_choice] && Array(parameters[:tools]).empty?
    raise ArgumentError, "'tool_choice' is only allowed when 'tools' are specified."
  end

  if block
    # Streaming mode: accumulate raw chunks for later reassembly and yield
    # each chunk's first choice to the caller as it arrives.
    @response_chunks = []
    parameters[:stream_options] = {include_usage: true}
    parameters[:stream] = proc do |chunk, _bytesize|
      chunk_content = chunk.dig("choices", 0) || {}
      @response_chunks << chunk
      yield chunk_content
    end
  end

  response = with_api_error_handling do
    client.chat(parameters: parameters)
  end

  # When streaming, the HTTP response body was consumed by the stream proc,
  # so rebuild a complete response hash from the accumulated chunks.
  response = response_from_chunks if block
  reset_response_chunks

  Langchain::LLM::OpenAIResponse.new(response)
end