Method: Langchain::LLM::OpenAI#initialize
- Defined in:
- lib/langchain/llm/openai.rb
#initialize(api_key:, llm_options: {}, default_options: {}) ⇒ OpenAI
Initialize an OpenAI LLM instance
# File 'lib/langchain/llm/openai.rb', line 33

def initialize(api_key:, llm_options: {}, default_options: {})
  depends_on "ruby-openai", req: "openai"

  # Surface HTTP errors in the log only when debug logging is enabled,
  # unless the caller has set :log_errors explicitly.
  llm_options[:log_errors] = Langchain.logger.debug? unless llm_options.key?(:log_errors)

  @client = ::OpenAI::Client.new(access_token: api_key, **llm_options) do |f|
    f.response :logger, Langchain.logger, {headers: true, bodies: true, errors: true}
  end

  @defaults = DEFAULTS.merge(default_options)
  chat_parameters.update(
    model: {default: @defaults[:chat_model]},
    logprobs: {},
    top_logprobs: {},
    n: {default: @defaults[:n]},
    temperature: {default: @defaults[:temperature]},
    user: {},
    response_format: {default: @defaults[:response_format]}
  )
  chat_parameters.ignore(:top_k)
end
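For context, a minimal usage sketch is shown below. It assumes the langchainrb and ruby-openai gems are installed and that the API key is read from an OPENAI_API_KEY environment variable; the specific llm_options and default_options values are illustrative, and the chat call and response accessor follow langchainrb's usual interface.

require "langchain"

# llm_options are forwarded to ::OpenAI::Client; default_options are merged
# into the gem's DEFAULTS (chat_model, temperature, response_format, ...).
llm = Langchain::LLM::OpenAI.new(
  api_key: ENV["OPENAI_API_KEY"],
  llm_options: {request_timeout: 120},                              # assumed passthrough option for ruby-openai
  default_options: {chat_model: "gpt-4o-mini", temperature: 0.0}    # illustrative defaults
)

# Simple chat call using the defaults configured above.
response = llm.chat(messages: [{role: "user", content: "Hello!"}])
puts response.chat_completion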