Method: Langchain::LLM::OpenAI#chat
- Defined in:
- lib/langchain/llm/openai.rb
#chat(params = {}, &block) ⇒ Object
Generate a chat completion for given messages.
Source (lib/langchain/llm/openai.rb, lines 120–147):
# File 'lib/langchain/llm/openai.rb', line 120

# Generate a chat completion for the given messages.
#
# @param params [Hash] chat parameters (e.g. :messages, :model, :tools,
#   :tool_choice); normalized via +chat_parameters.to_params+
# @param block [Proc] optional; when given, the request is streamed and the
#   block is yielded each chunk's first "choices" entry (or {} when absent)
# @return [Langchain::LLM::OpenAIResponse] wrapped API response (assembled
#   from the collected chunks when streaming)
# @raise [ArgumentError] if :messages or :model is missing, or if
#   :tool_choice is supplied without :tools
def chat(params = {}, &block)
  parameters = chat_parameters.to_params(params)

  # Guard clauses: validate required arguments before issuing the request.
  raise ArgumentError, "messages argument is required" if Array(parameters[:messages]).empty?
  raise ArgumentError, "model argument is required" if parameters[:model].to_s.empty?
  if parameters[:tool_choice] && Array(parameters[:tools]).empty?
    raise ArgumentError, "'tool_choice' is only allowed when 'tools' are specified."
  end

  if block
    @response_chunks = []
    # Request a trailing usage chunk so token counts are available when streaming.
    parameters[:stream_options] = {include_usage: true}
    parameters[:stream] = proc do |chunk, _bytesize|
      # Chunks without a "choices" entry (e.g. the usage-only final chunk)
      # yield {} to the caller; every raw chunk is kept for reassembly.
      chunk_content = chunk.dig("choices", 0) || {}
      @response_chunks << chunk
      yield chunk_content
    end
  end

  response = with_api_error_handling do
    client.chat(parameters: parameters)
  end

  # When streaming, the API returns no aggregate body; rebuild it from the
  # collected chunks, then clear the accumulator for the next call.
  response = response_from_chunks if block
  reset_response_chunks

  Langchain::LLM::OpenAIResponse.new(response)
end