Class: Langchain::LLM::MistralAI

Inherits:
Base < Object
Defined in:
lib/langchain/llm/mistral_ai.rb

Overview

Gem requirements:

gem "mistral-ai"

Usage:

llm = Langchain::LLM::MistralAI.new(api_key: ENV["MISTRAL_AI_API_KEY"])

Constant Summary collapse

# Default Mistral AI model identifiers, used unless the caller overrides
# them via the +default_options+ hash passed to #initialize.
DEFAULTS =
{
  chat_model: "mistral-large-latest",
  embedding_model: "mistral-embed"
}.freeze

Instance Attribute Summary collapse

Attributes inherited from Base

#client

Instance Method Summary collapse

Methods inherited from Base

#chat_parameters, #complete, #default_dimension, #default_dimensions, #summarize

Methods included from DependencyHelper

#depends_on

Constructor Details

#initialize(api_key:, default_options: {}) ⇒ MistralAI

Returns a new instance of MistralAI.



17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
# File 'lib/langchain/llm/mistral_ai.rb', line 17

# Builds the Mistral AI client wrapper.
#
# Verifies the "mistral-ai" gem is available, instantiates the underlying
# SSE-enabled Mistral client, merges caller overrides on top of DEFAULTS,
# and configures the unified chat-parameter schema.
#
# @param api_key [String] Mistral AI API key
# @param default_options [Hash] per-instance overrides merged over DEFAULTS
def initialize(api_key:, default_options: {})
  depends_on "mistral-ai"

  @client = Mistral.new(
    credentials: {api_key: api_key},
    options: {server_sent_events: true}
  )

  @defaults = DEFAULTS.merge(default_options)

  # Defaults fed into the shared chat-parameter schema; nil-valued entries
  # simply leave that parameter without a default.
  parameter_defaults = {
    model: {default: @defaults[:chat_model]},
    n: {default: @defaults[:n]},
    safe_prompt: {},
    temperature: {default: @defaults[:temperature]},
    response_format: {default: @defaults[:response_format]}
  }
  chat_parameters.update(parameter_defaults)
  # Mistral's API calls the seed parameter :random_seed.
  chat_parameters.remap(seed: :random_seed)
  chat_parameters.ignore(:n, :top_k)
end

Instance Attribute Details

#defaults ⇒ Object (readonly)

Returns the value of attribute defaults.



15
16
17
# File 'lib/langchain/llm/mistral_ai.rb', line 15

# @return [Hash] effective configuration: DEFAULTS merged with the
#   +default_options+ supplied to #initialize
def defaults
  @defaults
end

Instance Method Details

#chat(params = {}) ⇒ Object



37
38
39
40
41
42
43
# File 'lib/langchain/llm/mistral_ai.rb', line 37

# Generates a chat completion via the Mistral AI API.
#
# @param params [Hash] chat options; normalized through the shared
#   chat-parameter schema configured in #initialize
# @return [Langchain::LLM::MistralAIResponse] wrapped API response
def chat(params = {})
  request = chat_parameters.to_params(params)

  raw_response = client.chat_completions(request)

  Langchain::LLM::MistralAIResponse.new(raw_response.to_h)
end

#embed(text:, model: defaults[:embedding_model], encoding_format: nil) ⇒ Object



45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# File 'lib/langchain/llm/mistral_ai.rb', line 45

# Generates an embedding for the given text via the Mistral AI API.
#
# @param text [String] text to embed
# @param model [String] embedding model identifier (defaults to the
#   configured :embedding_model)
# @param encoding_format [String, nil] optional output encoding; omitted
#   from the request when not provided
# @return [Langchain::LLM::MistralAIResponse] wrapped API response
def embed(text:, model: defaults[:embedding_model], encoding_format: nil)
  request = {input: text, model: model}.tap do |payload|
    payload[:encoding_format] = encoding_format if encoding_format
  end

  raw_response = client.embeddings(request)

  Langchain::LLM::MistralAIResponse.new(raw_response.to_h)
end