Method: Langchain::LLM::OpenAI#initialize

Defined in:
lib/langchain/llm/openai.rb

#initialize(api_key:, llm_options: {}, default_options: {}) ⇒ OpenAI

Initialize an OpenAI LLM instance

Parameters:

  • api_key (String)

    The API key to use

  • llm_options (Hash)

    Options to pass to the OpenAI::Client constructor

  • default_options (Hash)

    Default request options merged over DEFAULTS (e.g. chat_model, n, temperature, response_format)
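
For example, an instance might be created like this (the request_timeout value, model name, and temperature are illustrative assumptions, not defaults of this class):

llm = Langchain::LLM::OpenAI.new(
  api_key: ENV["OPENAI_API_KEY"],
  # llm_options are forwarded to OpenAI::Client; request_timeout is a ruby-openai client option
  llm_options: {request_timeout: 120},
  # default_options are merged over DEFAULTS and reused on later calls
  default_options: {chat_model: "gpt-4o-mini", temperature: 0.0}
)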

[View source]

# File 'lib/langchain/llm/openai.rb', line 33

def initialize(api_key:, llm_options: {}, default_options: {})
  depends_on "ruby-openai", req: "openai"

  llm_options[:log_errors] = Langchain.logger.debug? unless llm_options.key?(:log_errors)

  @client = ::OpenAI::Client.new(access_token: api_key, **llm_options) do |f|
    f.response :logger, Langchain.logger, {headers: true, bodies: true, errors: true}
  end

  @defaults = DEFAULTS.merge(default_options)
  chat_parameters.update(
    model: {default: @defaults[:chat_model]},
    logprobs: {},
    top_logprobs: {},
    n: {default: @defaults[:n]},
    temperature: {default: @defaults[:temperature]},
    user: {},
    response_format: {default: @defaults[:response_format]}
  )
  chat_parameters.ignore(:top_k)
end
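
Because these defaults are registered with chat_parameters, later calls to #chat send the configured model, temperature, n, and response_format unless they are overridden per request. A minimal sketch, assuming the standard Langchain::LLM::OpenAI#chat interface:

# Uses the defaults configured at initialization
response = llm.chat(messages: [{role: "user", content: "Hello"}])

# A per-call value overrides the registered default
response = llm.chat(messages: [{role: "user", content: "Hello"}], temperature: 0.7)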