Class: MistralAPI
Instance Method Summary
- #create_chat_completion(model:, messages:, temperature: 0.7, top_p: 1, max_tokens: nil, stream: false, safe_prompt: false, random_seed: nil) ⇒ Object
- #create_embeddings(model:, input:, encoding_format: "float") ⇒ Object
- #initialize(api_key: ENV["MISTRAL_API_KEY"], base_uri: "https://api.mistral.ai/v1") ⇒ MistralAPI (constructor): A new instance of MistralAPI.
- #list_available_models ⇒ Object
Constructor Details
#initialize(api_key: ENV["MISTRAL_API_KEY"], base_uri: "https://api.mistral.ai/v1") ⇒ MistralAPI
Returns a new instance of MistralAPI.
    # File 'lib/mistral_rb.rb', line 14

    def initialize(api_key: ENV["MISTRAL_API_KEY"], base_uri: "https://api.mistral.ai/v1")
      raise 'API key not found. Please set the MISTRAL_API_KEY environment variable.' if api_key.nil? || api_key.empty?

      @headers = {
        "Authorization" => "Bearer #{api_key}",
        "Content-Type" => "application/json"
      }
      self.class.base_uri base_uri
    end
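A minimal usage sketch, assuming the gem is required as mistral_rb (matching the file path above) and that MISTRAL_API_KEY is exported in the environment:

    require "mistral_rb"

    # Reads the key from the MISTRAL_API_KEY environment variable by default.
    client = MistralAPI.new

    # Or pass the key (and optionally a different base URI) explicitly.
    client = MistralAPI.new(api_key: "your-api-key")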
Instance Method Details
#create_chat_completion(model:, messages:, temperature: 0.7, top_p: 1, max_tokens: nil, stream: false, safe_prompt: false, random_seed: nil) ⇒ Object
    # File 'lib/mistral_rb.rb', line 23

    def create_chat_completion(model:, messages:, temperature: 0.7, top_p: 1, max_tokens: nil, stream: false, safe_prompt: false, random_seed: nil)
      body = {
        model: model,
        messages: messages,
        temperature: temperature,
        top_p: top_p,
        max_tokens: max_tokens,
        stream: stream,
        safe_prompt: safe_prompt,
        random_seed: random_seed
      }.compact.to_json

      if stream
        # Use on_data callback for streaming
        self.class.post("/chat/completions", body: body, headers: @headers, stream_body: true) do |fragment, _, _|
          processed_chunk = handle_stream_chunk(fragment)
          yield(processed_chunk) if block_given? && processed_chunk
        end
      else
        # Handle non-streaming response
        response = self.class.post("/chat/completions", body: body, headers: @headers)
        parsed_response = handle_response(response)
        MistralModels::CompletionResponse.new(parsed_response)
      end
    end
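A usage sketch for both modes, reusing the client from the constructor example; the model name "mistral-tiny" is an assumption, and the returned objects are simply inspected rather than relying on accessors not documented on this page:

    # Non-streaming call: returns a MistralModels::CompletionResponse.
    response = client.create_chat_completion(
      model: "mistral-tiny",  # assumed model name
      messages: [{ role: "user", content: "Hello, Mistral!" }]
    )
    puts response.inspect

    # Streaming call: pass stream: true and a block; each processed chunk is yielded as it arrives.
    client.create_chat_completion(
      model: "mistral-tiny",
      messages: [{ role: "user", content: "Tell me a joke." }],
      stream: true
    ) do |chunk|
      puts chunk.inspect
    end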
#create_embeddings(model:, input:, encoding_format: "float") ⇒ Object
    # File 'lib/mistral_rb.rb', line 49

    def create_embeddings(model:, input:, encoding_format: "float")
      body = {
        model: model,
        input: input,
        encoding_format: encoding_format
      }.to_json

      response = self.class.post("/embeddings", body: body, headers: @headers)
      parsed_response = handle_response(response)
      MistralModels::EmbeddingResponse.new(parsed_response)
    end
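A short sketch; the model name "mistral-embed" is an assumption, and since the wrapper's accessors are not documented here, the response is only inspected:

    # Returns a MistralModels::EmbeddingResponse wrapping the parsed API payload.
    embeddings = client.create_embeddings(
      model: "mistral-embed",  # assumed model name
      input: ["Embed this sentence.", "And this one too."]
    )
    puts embeddings.inspect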
#list_available_models ⇒ Object
    # File 'lib/mistral_rb.rb', line 61

    def list_available_models
      response = self.class.get("/models", headers: @headers)
      parsed_response = handle_response(response)
      MistralModels::ModelListResponse.new(parsed_response)
    end
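A one-line usage sketch with the same assumed client; the exact fields of the wrapper are not shown on this page, so the result is only inspected:

    # Returns a MistralModels::ModelListResponse describing the models available to your key.
    models = client.list_available_models
    puts models.inspect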