Class: Langchain::LLM::GoogleGemini

Inherits:
Base
  • Object
show all
Defined in:
lib/langchain/llm/google_gemini.rb

Overview

Usage:

llm = Langchain::LLM::GoogleGemini.new(api_key: ENV['GOOGLE_GEMINI_API_KEY'])

Constant Summary collapse

DEFAULTS =
{
  # Default chat model; override per-client via default_options[:chat_model].
  chat_model: "gemini-1.5-pro-latest",
  # Default embedding model; override via default_options[:embedding_model].
  embedding_model: "text-embedding-004",
  # 0.0 = deterministic sampling by default.
  temperature: 0.0
}.freeze

Instance Attribute Summary collapse

Attributes inherited from Base

#client

Instance Method Summary collapse

Methods inherited from Base

#chat_parameters, #complete, #default_dimension, #default_dimensions, #summarize

Methods included from DependencyHelper

#depends_on

Constructor Details

#initialize(api_key:, default_options: {}) ⇒ GoogleGemini

Returns a new instance of GoogleGemini.



15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
# File 'lib/langchain/llm/google_gemini.rb', line 15

# Build a new GoogleGemini client.
#
# @param api_key [String] Google Gemini API key (sent as the `key` query parameter)
# @param default_options [Hash] per-client overrides merged on top of DEFAULTS
#   (e.g. :chat_model, :embedding_model, :temperature, :safety_settings)
def initialize(api_key:, default_options: {})
  @api_key = api_key
  @defaults = DEFAULTS.merge(default_options)

  # Register defaults for the parameters the unified chat interface accepts.
  # NOTE(review): :safety_settings has no entry in DEFAULTS, so its default is
  # nil unless supplied via default_options — presumably intentional; confirm.
  chat_parameters.update(
    model: {default: @defaults[:chat_model]},
    temperature: {default: @defaults[:temperature]},
    generation_config: {default: nil},
    safety_settings: {default: @defaults[:safety_settings]}
  )
  # Gemini's REST API uses different field names than the unified interface;
  # remap outgoing keys to the names generateContent expects.
  chat_parameters.remap(
    messages: :contents,
    system: :system_instruction,
    tool_choice: :tool_config
  )
end

Instance Attribute Details

#api_keyObject (readonly)

Returns the value of attribute api_key.



13
14
15
# File 'lib/langchain/llm/google_gemini.rb', line 13

# @return [String] the API key this client was constructed with
def api_key
  @api_key
end

#defaultsObject (readonly)

Returns the value of attribute defaults.



13
14
15
# File 'lib/langchain/llm/google_gemini.rb', line 13

# @return [Hash] DEFAULTS merged with the constructor's default_options
def defaults
  @defaults
end

Instance Method Details

#chat(params = {}) ⇒ Object

Generate a chat completion for a given prompt

Parameters:

  • messages (Array<Hash>)

    List of messages comprising the conversation so far

  • model (String)

    The model to use

  • tools (Array<Hash>)

    A list of Tools the model may use to generate the next response

  • tool_choice (String)

    Specifies the mode in which function calling should execute. If unspecified, the default value will be set to AUTO. Possible values: AUTO, ANY, NONE

  • system (String)

    Developer set system instruction

Raises:

  • (ArgumentError)


39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
# File 'lib/langchain/llm/google_gemini.rb', line 39

# Generate a chat completion via the Gemini generateContent endpoint.
#
# @param params [Hash] unified chat parameters; notably :messages (required),
#   :model, :tools, :tool_choice, :system, plus sampling controls
#   (:temperature, :top_p, :top_k, :max_tokens, :response_format, :stop)
# @return [Langchain::LLM::GoogleGeminiResponse] wrapped API response
# @raise [ArgumentError] if :messages is missing or empty
# @raise [StandardError] with the raw parsed response when the API returns
#   neither a completion nor tool calls (i.e. an error payload)
def chat(params = {})
  # Gemini expects the system instruction and tool list wrapped in envelopes.
  params[:system] = {parts: [{text: params[:system]}]} if params[:system]
  params[:tools] = {function_declarations: params[:tools]} if params[:tools]

  raise ArgumentError, "messages argument is required" if Array(params[:messages]).empty?

  parameters = chat_parameters.to_params(params)
  parameters[:generation_config] ||= {}

  # Gemini nests sampling controls under generation_config, some under
  # different names. Move each top-level value there without clobbering an
  # explicitly provided generation_config entry, then drop the original key.
  {
    temperature: :temperature,
    top_p: :top_p,
    top_k: :top_k,
    max_tokens: :max_output_tokens,
    response_format: :response_mime_type,
    stop: :stop_sequences
  }.each do |param_key, config_key|
    value = parameters.delete(param_key)
    parameters[:generation_config][config_key] ||= value if value
  end

  uri = URI("https://generativelanguage.googleapis.com/v1beta/models/#{parameters[:model]}:generateContent?key=#{api_key}")

  parsed_response = http_post(uri, parameters)

  wrapped_response = Langchain::LLM::GoogleGeminiResponse.new(parsed_response, model: parameters[:model])

  # Surface API error payloads instead of returning an empty wrapper.
  if wrapped_response.chat_completion || Array(wrapped_response.tool_calls).any?
    wrapped_response
  else
    raise StandardError.new(parsed_response)
  end
end

#embed(text:, model: @defaults[:embedding_model]) ⇒ Object



73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
# File 'lib/langchain/llm/google_gemini.rb', line 73

# Generate an embedding for the given text via the embedContent endpoint.
#
# @param text [String] the text to embed
# @param model [String] embedding model name; defaults to the client's
#   configured :embedding_model
# @return [Langchain::LLM::GoogleGeminiResponse] wrapped embedding response
def embed(
  text:,
  model: @defaults[:embedding_model]
)
  # embedContent expects the text wrapped in a content/parts envelope.
  request_body = {content: {parts: [{text: text}]}}

  endpoint = URI("https://generativelanguage.googleapis.com/v1beta/models/#{model}:embedContent?key=#{api_key}")

  Langchain::LLM::GoogleGeminiResponse.new(http_post(endpoint, request_body), model: model)
end