Class: Langchain::LLM::MistralAIResponse

Inherits:
BaseResponse show all
Defined in:
lib/langchain/llm/response/mistral_ai_response.rb

Instance Attribute Summary

Attributes inherited from BaseResponse

#context, #raw_response

Instance Method Summary collapse

Methods inherited from BaseResponse

#chat_completions, #completion, #completions, #embeddings, #initialize

Constructor Details

This class inherits a constructor from Langchain::LLM::BaseResponse

Instance Method Details

#chat_completionObject



9
10
11
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 9

# Text of the first chat choice's message.
#
# @return [String, nil] message content, or nil when the path is absent
def chat_completion
  content_path = ["choices", 0, "message", "content"]
  raw_response.dig(*content_path)
end

#completion_tokensObject



29
30
31
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 29

# Number of tokens generated for the completion, as reported by the API.
#
# @return [Integer, nil] count from "usage", or nil when not reported
def completion_tokens
  usage_path = ["usage", "completion_tokens"]
  raw_response.dig(*usage_path)
end

#created_atObject



33
34
35
36
37
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 33

# Creation time of the response, converted from the API's Unix timestamp.
#
# @return [Time, nil] Time for the "created_at" epoch seconds, or nil when absent
def created_at
  # Single hash lookup instead of the original double `dig` on one key;
  # `dig` with a lone key is just `[]` and the value was fetched twice.
  timestamp = raw_response["created_at"]
  Time.at(timestamp) if timestamp
end

#embeddingObject



17
18
19
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 17

# Embedding vector of the first data entry in an embeddings response.
#
# @return [Array<Float>, nil] the vector, or nil when the path is absent
def embedding
  vector_path = ["data", 0, "embedding"]
  raw_response.dig(*vector_path)
end

#modelObject



5
6
7
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 5

# Identifier of the model that produced this response.
#
# @return [String, nil]
def model
  raw_response.fetch("model", nil)
end

#prompt_tokensObject



21
22
23
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 21

# Number of tokens consumed by the prompt, as reported by the API.
#
# @return [Integer, nil] count from "usage", or nil when not reported
def prompt_tokens
  usage_path = ["usage", "prompt_tokens"]
  raw_response.dig(*usage_path)
end

#roleObject



13
14
15
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 13

# Role attached to the first chat choice's message (e.g. "assistant").
#
# @return [String, nil]
def role
  role_path = ["choices", 0, "message", "role"]
  raw_response.dig(*role_path)
end

#total_tokensObject



25
26
27
# File 'lib/langchain/llm/response/mistral_ai_response.rb', line 25

# Total tokens billed for the request (prompt + completion), per the API.
#
# @return [Integer, nil] count from "usage", or nil when not reported
def total_tokens
  usage_path = ["usage", "total_tokens"]
  raw_response.dig(*usage_path)
end