Class: GenAI::Language::OpenAI

Inherits:
  Base
    • Object
Includes:
  Api::Format::OpenAI
Defined in:
  lib/gen_ai/language/open_ai.rb

Constant Summary

EMBEDDING_MODEL = 'text-embedding-ada-002'
COMPLETION_MODEL = 'gpt-3.5-turbo-1106'

Constants inherited from Base

Base::DEFAULT_ROLE

Instance Method Summary

Methods included from Api::Format::OpenAI

#build_raw_choices, #build_raw_response, #chunk_params_from_streaming, #extract_completions, #extract_embeddings

Methods included from Dependency

#depends_on

Constructor Details

#initialize(token:, options: {}) ⇒ OpenAI

Returns a new instance of OpenAI.



# File 'lib/gen_ai/language/open_ai.rb', line 11

def initialize(token:, options: {})
  depends_on 'ruby-openai'

  @client = ::OpenAI::Client.new(access_token: token)
end
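The constructor declares a runtime dependency on the ruby-openai gem and builds an ::OpenAI::Client from the given token. A usage sketch (the environment variable name is a placeholder):

  require 'gen_ai'

  # Instantiate the provider directly with an OpenAI API token.
  model = GenAI::Language::OpenAI.new(token: ENV['OPENAI_API_KEY'])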

Instance Method Details

#chat(messages, options = {}, &block) ⇒ Object

# File 'lib/gen_ai/language/open_ai.rb', line 29

def chat(messages, options = {}, &block)
  parameters = build_chat_options(messages, options)

  block_given? ? chat_streaming_request(parameters, block) : chat_request(parameters)
end
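When a block is given, the request is streamed and each chunk is yielded to the block; otherwise the full response is returned. A usage sketch, continuing the constructor example above (the role/content message format mirrors the OpenAI chat API; the .value readers on the result and chunk objects are assumptions about the gem's result wrappers):

  # Blocking request: returns a result object wrapping the API response.
  result = model.chat([{ role: 'user', content: 'Say hello in French.' }])
  puts result.value # assumed reader for the generated text

  # Streaming request: the block receives chunks as they arrive.
  model.chat([{ role: 'user', content: 'Say hello in French.' }]) do |chunk|
    print chunk.value # assumed reader for the chunk text
  end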

#complete(prompt, options = {}) ⇒ Object

# File 'lib/gen_ai/language/open_ai.rb', line 25

def complete(prompt, options = {})
  chat_request build_completion_options(prompt, options)
end
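complete wraps the prompt in chat-completion options and issues a single chat request. A usage sketch (the temperature option and the .value reader are illustrative assumptions):

  result = model.complete('Write a haiku about Ruby.', temperature: 0.7)
  puts result.value # assumed reader for the completion text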

#embed(input, model: nil) ⇒ Object



# File 'lib/gen_ai/language/open_ai.rb', line 17

def embed(input, model: nil)
  parameters = { input: input, model: model || EMBEDDING_MODEL }

  response = handle_errors { client.embeddings(parameters: parameters) }

  build_result(model: parameters[:model], raw: response, parsed: extract_embeddings(response))
end
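embed falls back to EMBEDDING_MODEL when no model is given and wraps the API call in handle_errors. A usage sketch (the alternate model name and the .value reader are illustrative assumptions):

  result = model.embed('Ruby is a dynamic language.')
  vector = result.value # assumed reader; expected to be an array of floats

  # Passing a model overrides the EMBEDDING_MODEL default.
  model.embed('Ruby is a dynamic language.', model: 'text-embedding-3-small')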