Class: Mistral::Client
- Inherits: ClientBase (ancestry: Object → ClientBase → Mistral::Client)
- Defined in: lib/mistral/client.rb
Overview
Synchronous wrapper around the asynchronous client.
Instance Attribute Summary
Attributes inherited from ClientBase
#api_key, #endpoint, #max_retries, #timeout
Instance Method Summary
-
#chat(messages:, model: nil, tools: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, safe_mode: false, safe_prompt: false, tool_choice: nil, response_format: nil) ⇒ ChatCompletionResponse
A chat endpoint that returns a single response.
-
#chat_stream(messages:, model: nil, tools: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, safe_mode: false, safe_prompt: false, tool_choice: nil, response_format: nil) ⇒ Enumerator<ChatCompletionStreamResponse>
A chat endpoint that streams responses.
-
#completion(model:, prompt:, suffix: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, stop: nil) ⇒ ChatCompletionResponse
A completion endpoint that returns a single response.
-
#completion_stream(model:, prompt:, suffix: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, stop: nil) ⇒ Enumerator<ChatCompletionStreamResponse>
An asynchronous completion endpoint that streams responses.
-
#embeddings(model:, input:) ⇒ EmbeddingResponse
An embeddings endpoint that returns embeddings for a single input or a batch of inputs.
-
#initialize(api_key: nil, endpoint: ENDPOINT, max_retries: 5, timeout: 120) ⇒ Client
constructor
A new instance of Client.
-
#list_models ⇒ ModelList
Returns a list of the available models.
Constructor Details
#initialize(api_key: nil, endpoint: ENDPOINT, max_retries: 5, timeout: 120) ⇒ Client
Returns a new instance of Client.
# File 'lib/mistral/client.rb', line 8

def initialize(
  api_key: nil,
  endpoint: ENDPOINT,
  max_retries: 5,
  timeout: 120
)
  super(endpoint: endpoint, api_key: api_key, max_retries: max_retries, timeout: timeout)

  @client = HTTP.persistent(ENDPOINT)
                .follow
                .timeout(timeout)
                .use(:line_iterable_body)
                .headers(
                  'Accept' => 'application/json',
                  'User-Agent' => "mistral-client-ruby/#{VERSION}",
                  'Authorization' => "Bearer #{@api_key}",
                  'Content-Type' => 'application/json'
                )
end
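A minimal construction sketch (reading the key from a MISTRAL_API_KEY environment variable is an assumption for illustration, not something this page prescribes):

require 'mistral'

# Build a synchronous client; the API key is read from the environment here.
client = Mistral::Client.new(api_key: ENV.fetch('MISTRAL_API_KEY'))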
Instance Method Details
#chat(messages:, model: nil, tools: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, safe_mode: false, safe_prompt: false, tool_choice: nil, response_format: nil) ⇒ ChatCompletionResponse
A chat endpoint that returns a single response.
# File 'lib/mistral/client.rb', line 43

def chat(
  messages:, model: nil, tools: nil, temperature: nil, max_tokens: nil,
  top_p: nil, random_seed: nil, safe_mode: false, safe_prompt: false,
  tool_choice: nil, response_format: nil
)
  request = make_chat_request(
    messages: messages,
    model: model,
    tools: tools,
    temperature: temperature,
    max_tokens: max_tokens,
    top_p: top_p,
    random_seed: random_seed,
    stream: false,
    safe_prompt: safe_mode || safe_prompt,
    tool_choice: tool_choice,
    response_format: response_format
  )

  single_response = request('post', 'v1/chat/completions', json: request)

  single_response.each do |response|
    return ChatCompletionResponse.new(response)
  end

  raise Mistral::Error.new(message: 'No response received')
end
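An example call (a sketch: the model name is illustrative, and the choices/message/content accessor chain assumes the response object mirrors the Mistral chat API JSON):

response = client.chat(
  model: 'mistral-small-latest',
  messages: [{ role: 'user', content: 'What is the best French cheese?' }]
)

# Print the assistant's reply from the first choice.
puts response.choices.first.message.content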
#chat_stream(messages:, model: nil, tools: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, safe_mode: false, safe_prompt: false, tool_choice: nil, response_format: nil) ⇒ Enumerator<ChatCompletionStreamResponse>
A chat endpoint that streams responses.
# File 'lib/mistral/client.rb', line 95

def chat_stream(
  messages:, model: nil, tools: nil, temperature: nil, max_tokens: nil,
  top_p: nil, random_seed: nil, safe_mode: false, safe_prompt: false,
  tool_choice: nil, response_format: nil
)
  request = make_chat_request(
    messages: messages,
    model: model,
    tools: tools,
    temperature: temperature,
    max_tokens: max_tokens,
    top_p: top_p,
    random_seed: random_seed,
    stream: true,
    safe_prompt: safe_mode || safe_prompt,
    tool_choice: tool_choice,
    response_format: response_format
  )

  Enumerator.new do |yielder|
    request('post', 'v1/chat/completions', json: request, stream: true).each do |json_response|
      yielder << ChatCompletionStreamResponse.new(**json_response)
    end
  end
end
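Because the method returns an Enumerator, chunks arrive as you iterate. A sketch (model name illustrative; the delta accessor chain is an assumption based on the Mistral streaming response shape):

client.chat_stream(
  model: 'mistral-small-latest',
  messages: [{ role: 'user', content: 'Tell me a short story.' }]
).each do |chunk|
  # Each chunk is a ChatCompletionStreamResponse carrying an incremental delta.
  print chunk.choices.first.delta.content
end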
#completion(model:, prompt:, suffix: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, stop: nil) ⇒ ChatCompletionResponse
A completion endpoint that returns a single response.
# File 'lib/mistral/client.rb', line 173

def completion(
  model:, prompt:, suffix: nil, temperature: nil, max_tokens: nil,
  top_p: nil, random_seed: nil, stop: nil
)
  request = make_completion_request(
    prompt:, model:, suffix:, temperature:, max_tokens:, top_p:, random_seed:, stop:
  )

  single_response = request('post', 'v1/fim/completions', json: request, stream: false)

  single_response.each do |response|
    return ChatCompletionResponse.new(**response)
  end

  raise Error, 'No response received'
end
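The v1/fim/completions path is Mistral's fill-in-the-middle endpoint, so prompt and suffix bracket the text to be generated. A sketch (the model name is illustrative):

response = client.completion(
  model: 'codestral-latest',
  prompt: 'def fibonacci(n)',
  suffix: 'end'
)

# The generated middle section between prompt and suffix.
puts response.choices.first.message.content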
#completion_stream(model:, prompt:, suffix: nil, temperature: nil, max_tokens: nil, top_p: nil, random_seed: nil, stop: nil) ⇒ Enumerator<ChatCompletionStreamResponse>
A completion endpoint that streams responses.
# File 'lib/mistral/client.rb', line 208

def completion_stream(
  model:, prompt:, suffix: nil, temperature: nil, max_tokens: nil,
  top_p: nil, random_seed: nil, stop: nil
)
  request = make_completion_request(
    prompt:, model:, suffix:, temperature:, max_tokens:, top_p:, random_seed:, stop:,
    stream: true
  )

  response = request('post', 'v1/fim/completions', json: request, stream: true)

  response.lazy.map do |json_streamed_response|
    ChatCompletionStreamResponse.new(**json_streamed_response)
  end
end
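Unlike #chat_stream, this method returns a lazy enumerator mapped over the response body, but iteration looks the same from the caller's side. A sketch (model name illustrative; delta accessors assumed as above):

client.completion_stream(
  model: 'codestral-latest',
  prompt: 'def fibonacci(n)',
  suffix: 'end'
).each do |chunk|
  print chunk.choices.first.delta.content
end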
#embeddings(model:, input:) ⇒ EmbeddingResponse
An embeddings endpoint that returns embeddings for a single input or a batch of inputs.
# File 'lib/mistral/client.rb', line 136

def embeddings(model:, input:)
  request = { model: model, input: input }
  singleton_response = request('post', 'v1/embeddings', json: request)

  singleton_response.each do |response|
    return EmbeddingResponse.new(response)
  end

  raise Mistral::Error.new(message: 'No response received')
end
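A sketch passing a batch of inputs (mistral-embed is an illustrative model name; the data/embedding accessors assume the response mirrors the Mistral embeddings API JSON):

response = client.embeddings(
  model: 'mistral-embed',
  input: ['Embed this sentence.', 'As well as this one.']
)

# One embedding vector per input, in order.
response.data.each { |entry| puts entry.embedding.length }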
#list_models ⇒ ModelList
Returns a list of the available models.
# File 'lib/mistral/client.rb', line 151

def list_models
  singleton_response = request('get', 'v1/models')

  singleton_response.each do |response|
    return ModelList.new(response)
  end

  raise Mistral::Error.new(message: 'No response received')
end
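A sketch (the data/id accessors assume ModelList mirrors the v1/models JSON):

models = client.list_models

# Print the identifier of each available model.
models.data.each { |model| puts model.id }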