Class: Durable::Llm::Providers::OpenAI
- Inherits:
-
Base
- Object
- Base
- Durable::Llm::Providers::OpenAI
show all
- Defined in:
- lib/durable/llm/providers/openai.rb
Defined Under Namespace
Classes: OpenAIChoice, OpenAIEmbeddingResponse, OpenAIMessage, OpenAIResponse, OpenAIStreamChoice, OpenAIStreamDelta, OpenAIStreamResponse
Constant Summary
collapse
- BASE_URL =
'https://api.openai.com/v1'
Instance Attribute Summary collapse
Class Method Summary
collapse
Instance Method Summary
collapse
Methods inherited from Base
models, #stream?
Constructor Details
#initialize(api_key: nil, organization: nil) ⇒ OpenAI
Returns a new instance of OpenAI.
19
20
21
22
23
24
25
26
27
|
# File 'lib/durable/llm/providers/openai.rb', line 19
# Builds an OpenAI provider instance.
#
# Resolves credentials from the explicit keyword arguments first, then
# falls back to configuration / environment, and prepares a Faraday
# connection that encodes request bodies and decodes response bodies as JSON.
#
# @param api_key [String, nil] OpenAI API key; defaults to #default_api_key
# @param organization [String, nil] OpenAI organization id; defaults to
#   ENV['OPENAI_ORGANIZATION']
def initialize(api_key: nil, organization: nil)
  @api_key      = api_key || default_api_key
  @organization = organization || ENV['OPENAI_ORGANIZATION']
  @conn = Faraday.new(url: BASE_URL) do |conn|
    conn.request :json
    conn.response :json
    conn.adapter Faraday.default_adapter
  end
end
|
Instance Attribute Details
#api_key ⇒ Object
Returns the value of attribute api_key.
17
18
19
|
# File 'lib/durable/llm/providers/openai.rb', line 17
# Reader for the OpenAI API key used in the Authorization header.
#
# @return [String, nil] the API key set at construction time
def api_key
  @api_key
end
|
#organization ⇒ Object
Returns the value of attribute organization.
17
18
19
|
# File 'lib/durable/llm/providers/openai.rb', line 17
# Reader for the OpenAI organization identifier sent as the
# OpenAI-Organization header (when present).
#
# @return [String, nil] the organization id set at construction time
def organization
  @organization
end
|
Class Method Details
.stream? ⇒ Boolean
58
59
60
|
# File 'lib/durable/llm/providers/openai.rb', line 58
# Indicates that this provider supports streaming responses.
#
# @return [Boolean] always true for the OpenAI provider
def self.stream?
  true
end
|
Instance Method Details
#completion(options) ⇒ Object
29
30
31
32
33
34
35
36
37
|
# File 'lib/durable/llm/providers/openai.rb', line 29
# Performs a chat completion request against POST /chat/completions.
#
# @param options [Hash] request payload sent as the JSON body
#   (model, messages, etc. — passed through unmodified)
# @return [Object] the result of handle_response on the Faraday response
def completion(options)
  response = @conn.post('chat/completions') do |req|
    # Fix: headers must be set via `req.headers[...]`; the bare
    # `req.['Authorization']` form is not valid Ruby.
    req.headers['Authorization'] = "Bearer #{@api_key}"
    req.headers['OpenAI-Organization'] = @organization if @organization
    req.body = options
  end
  handle_response(response)
end
|
#default_api_key ⇒ Object
13
14
15
|
# File 'lib/durable/llm/providers/openai.rb', line 13
# Resolves the API key when none was passed to the constructor:
# library configuration wins over the OPENAI_API_KEY environment variable.
#
# @return [String, nil] the configured or environment-provided key
def default_api_key
  configured = Durable::Llm.configuration.openai&.api_key
  configured || ENV['OPENAI_API_KEY']
end
|
#embedding(model:, input:, **options) ⇒ Object
39
40
41
42
43
44
45
46
47
|
# File 'lib/durable/llm/providers/openai.rb', line 39
# Requests vector embeddings from POST /embeddings.
#
# @param model [String] embedding model identifier
# @param input [String, Array] text (or batch of texts) to embed
# @param options [Hash] any additional body parameters, merged in
# @return [Object] the response wrapped via handle_response with
#   OpenAIEmbeddingResponse
def embedding(model:, input:, **options)
  response = @conn.post('embeddings') do |req|
    # Fix: headers must be set via `req.headers[...]`; the bare
    # `req.['Authorization']` form is not valid Ruby.
    req.headers['Authorization'] = "Bearer #{@api_key}"
    req.headers['OpenAI-Organization'] = @organization if @organization
    req.body = { model: model, input: input, **options }
  end
  handle_response(response, OpenAIEmbeddingResponse)
end
|
#models ⇒ Object
49
50
51
52
53
54
55
56
|
# File 'lib/durable/llm/providers/openai.rb', line 49
# Lists the model identifiers available to this account via GET /models.
#
# @return [Array<String>] the 'id' field of each model entry in the
#   response data
def models
  response = @conn.get('models') do |req|
    # Fix: headers must be set via `req.headers[...]`; the bare
    # `req.['Authorization']` form is not valid Ruby.
    req.headers['Authorization'] = "Bearer #{@api_key}"
    req.headers['OpenAI-Organization'] = @organization if @organization
  end
  handle_response(response).data.map { |model| model['id'] }
end
|
#stream(options) ⇒ Object
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
|
# File 'lib/durable/llm/providers/openai.rb', line 62
# Performs a streaming chat completion, yielding each server-sent event
# chunk to the caller's block as an OpenAIStreamResponse.
#
# @param options [Hash] request payload; :stream is forced to true
# @yield [OpenAIStreamResponse] one wrapped response per streamed chunk
# @return [Object] the result of handle_response on the final response
def stream(options)
  options[:stream] = true
  response = @conn.post('chat/completions') do |req|
    # Fix: headers must be set via `req.headers[...]`; the bare
    # `req.['Authorization']` form is not valid Ruby.
    req.headers['Authorization'] = "Bearer #{@api_key}"
    req.headers['OpenAI-Organization'] = @organization if @organization
    req.headers['Accept'] = 'text/event-stream'
    # NOTE(review): temperature is coerced via a String key while :stream
    # uses a Symbol key — preserved as-is; confirm which key style callers use.
    options['temperature'] = options['temperature'].to_f if options['temperature']
    req.body = options
    user_proc = proc do |chunk, _size, _total|
      yield OpenAIStreamResponse.new(chunk)
    end
    req.options.on_data = to_json_stream(user_proc: user_proc)
  end
  handle_response(response)
end
|