Class: Langchain::LLM::GoogleGemini
- Defined in:
- lib/langchain/llm/google_gemini.rb
Overview
Constant Summary collapse
- DEFAULTS =
{ chat_model: "gemini-1.5-pro-latest", embedding_model: "text-embedding-004", temperature: 0.0 }
Instance Attribute Summary collapse
-
#api_key ⇒ Object
readonly
Returns the value of attribute api_key.
-
#defaults ⇒ Object
readonly
Returns the value of attribute defaults.
Attributes inherited from Base
Instance Method Summary collapse
-
#chat(params = {}) ⇒ Object
Generate a chat completion for a given prompt.
- #embed(text:, model: @defaults[:embedding_model]) ⇒ Object
-
#initialize(api_key:, default_options: {}) ⇒ GoogleGemini
constructor
A new instance of GoogleGemini.
Methods inherited from Base
#chat_parameters, #complete, #default_dimension, #default_dimensions, #summarize
Methods included from DependencyHelper
Constructor Details
#initialize(api_key:, default_options: {}) ⇒ GoogleGemini
Returns a new instance of GoogleGemini.
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 |
# File 'lib/langchain/llm/google_gemini.rb', line 15

# Initialize the Google Gemini LLM client.
#
# @param api_key [String] Google AI Studio API key
# @param default_options [Hash] overrides for DEFAULTS (e.g. :chat_model,
#   :embedding_model, :temperature, :safety_settings)
def initialize(api_key:, default_options: {})
  @api_key = api_key
  # BUG FIX: merge() was called with no arguments, silently discarding the
  # caller-supplied default_options; they must override DEFAULTS.
  @defaults = DEFAULTS.merge(default_options)
  chat_parameters.update(
    model: {default: @defaults[:chat_model]},
    temperature: {default: @defaults[:temperature]},
    generation_config: {default: nil},
    safety_settings: {default: @defaults[:safety_settings]}
  )
  # Rename unified chat params to the names the Gemini REST API expects.
  chat_parameters.remap(
    messages: :contents,
    system: :system_instruction,
    tool_choice: :tool_config
  )
end
Instance Attribute Details
#api_key ⇒ Object (readonly)
Returns the value of attribute api_key.
13 14 15 |
# File 'lib/langchain/llm/google_gemini.rb', line 13

# @return [Object] the API key supplied at construction (read-only)
def api_key
  @api_key
end
#defaults ⇒ Object (readonly)
Returns the value of attribute defaults.
13 14 15 |
# File 'lib/langchain/llm/google_gemini.rb', line 13

# @return [Object] the merged default options hash (read-only)
def defaults
  @defaults
end
Instance Method Details
#chat(params = {}) ⇒ Object
Generate a chat completion for a given prompt.
39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 |
# File 'lib/langchain/llm/google_gemini.rb', line 39

# Generate a chat completion for a given prompt.
#
# @param params [Hash] chat parameters; :messages is required. Also
#   supports :system, :tools, :temperature, :top_p, :top_k, :max_tokens,
#   :response_format and :stop, which are folded into generation_config.
# @return [Langchain::LLM::GoogleGeminiResponse] wrapped API response
# @raise [ArgumentError] if :messages is missing or empty
# @raise [StandardError] if the response carries neither a chat
#   completion nor tool calls (raised with the parsed response payload)
def chat(params = {})
  params[:system] = {parts: [{text: params[:system]}]} if params[:system]
  params[:tools] = {function_declarations: params[:tools]} if params[:tools]

  raise ArgumentError, "messages argument is required" if Array(params[:messages]).empty?

  parameters = chat_parameters.to_params(params)
  parameters[:generation_config] ||= {}

  # Fold top-level sampling/output options into generation_config, where the
  # Gemini API expects them. An explicit generation_config value wins over
  # the top-level one; the top-level key is removed either way.
  {
    temperature: :temperature,
    top_p: :top_p,
    top_k: :top_k,
    max_tokens: :max_output_tokens,
    response_format: :response_mime_type,
    stop: :stop_sequences
  }.each do |param, config_key|
    value = parameters.delete(param)
    parameters[:generation_config][config_key] ||= value if value
  end

  uri = URI("https://generativelanguage.googleapis.com/v1beta/models/#{parameters[:model]}:generateContent?key=#{api_key}")

  parsed_response = http_post(uri, parameters)

  wrapped_response = Langchain::LLM::GoogleGeminiResponse.new(parsed_response, model: parameters[:model])

  if wrapped_response.chat_completion || Array(wrapped_response.tool_calls).any?
    wrapped_response
  else
    # parsed_response is a Hash, so keep the .new form (message is its #to_s).
    raise StandardError.new(parsed_response)
  end
end
#embed(text:, model: @defaults[:embedding_model]) ⇒ Object
73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 |
# File 'lib/langchain/llm/google_gemini.rb', line 73

# Generate an embedding for a given text.
#
# NOTE: the method name was missing in the extracted code (`def (` is not
# valid Ruby); restored as `embed` per the documented signature.
#
# @param text [String] the text to embed
# @param model [String] embedding model name; defaults to the configured
#   :embedding_model
# @return [Langchain::LLM::GoogleGeminiResponse] wrapped API response
def embed(
  text:,
  model: @defaults[:embedding_model]
)
  params = {
    content: {
      parts: [
        {
          text: text
        }
      ]
    }
  }

  uri = URI("https://generativelanguage.googleapis.com/v1beta/models/#{model}:embedContent?key=#{api_key}")

  parsed_response = http_post(uri, params)

  Langchain::LLM::GoogleGeminiResponse.new(parsed_response, model: model)
end