Class: LLM::OpenAI::Responses
- Inherits: Object
  - Object
  - LLM::OpenAI::Responses
- Defined in:
- lib/llm/providers/openai/responses.rb
Overview
The LLM::OpenAI::Responses class provides an interface for OpenAI's response API.
Instance Method Summary collapse
-
#create(prompt, params = {}) ⇒ LLM::Response
Create a response.
-
#delete(response) ⇒ LLM::Object
Deletes a response.
-
#get(response, **params) ⇒ LLM::Response
Get a response.
-
#initialize(provider) ⇒ LLM::OpenAI::Responses
constructor
Returns a new Responses object.
Constructor Details
#initialize(provider) ⇒ LLM::OpenAI::Responses
Returns a new Responses object.
24 25 26 |
# File 'lib/llm/providers/openai/responses.rb', line 24

# Returns a new Responses object bound to the given provider.
#
# @param provider [LLM::Provider] the provider whose configuration
#   (default model, credentials, transport) this interface delegates to
def initialize(provider)
  @provider = provider
end
Instance Method Details
#create(prompt, params = {}) ⇒ LLM::Response
Create a response
37 38 39 40 41 42 43 44 45 46 47 48 49 50 |
# File 'lib/llm/providers/openai/responses.rb', line 37

# Create a response via OpenAI's /v1/responses endpoint.
#
# @param prompt [String, LLM::Message] the prompt to send
# @param params [Hash] request options; recognized keys include
#   :role (default :user), :model (default provider's model),
#   :tools, :input (prior messages), :stream
# @return [LLM::Response]
def create(prompt, params = {})
  params = {role: :user, model: @provider.default_model}.merge!(params)
  tools = resolve_tools(params.delete(:tools))
  # Merge schema/tool adaptations into a single params hash, dropping nils.
  params = [params, adapt_schema(params), adapt_tools(tools)].inject({}, &:merge!).compact
  role, stream = params.delete(:role), params.delete(:stream)
  # Streaming is enabled when an IO-like sink (responds to <<) or `true` is given.
  params[:stream] = true if stream.respond_to?(:<<) || stream == true
  req = Net::HTTP::Post.new("/v1/responses", headers)
  # Prior :input messages (if any) come first, then the new prompt message.
  # NOTE(review): the identifier was lost in the extracted snippet; `messages`
  # restores the only local consistent with both usage sites.
  messages = [*(params.delete(:input) || []), LLM::Message.new(role, prompt)]
  body = LLM.json.dump({input: [adapt(messages, mode: :response)].flatten}.merge!(params))
  set_body_stream(req, StringIO.new(body))
  res = execute(request: req, stream:, stream_parser:)
  # Expose the resolved tools on the adapted response for later tool-call handling.
  ResponseAdapter.adapt(res, type: :responds)
    .extend(Module.new { define_method(:__tools__) { tools } })
end
#delete(response) ⇒ LLM::Object
Deletes a response
72 73 74 75 76 77 |
# File 'lib/llm/providers/openai/responses.rb', line 72

# Deletes a response.
#
# @param response [LLM::Response, String] a response object or its id
# @return [LLM::Object]
def delete(response)
  # Accept either a response object (duck-typed on #id) or a raw id string.
  id = if response.respond_to?(:id)
    response.id
  else
    response
  end
  request = Net::HTTP::Delete.new("/v1/responses/#{id}", headers)
  LLM::Response.new(execute(request: request))
end
#get(response, **params) ⇒ LLM::Response
Get a response
58 59 60 61 62 63 64 |
# File 'lib/llm/providers/openai/responses.rb', line 58

# Get a response.
#
# @param response [LLM::Response, String] a response object or its id
# @param params [Hash] extra query-string parameters
# @return [LLM::Response]
def get(response, **params)
  # Accept either a response object (duck-typed on #id) or a raw id string.
  id = response.respond_to?(:id) ? response.id : response
  path = "/v1/responses/#{id}?#{URI.encode_www_form(params)}"
  req = Net::HTTP::Get.new(path, headers)
  ResponseAdapter.adapt(execute(request: req), type: :responds)
end