Class: Langchain::Assistant

Inherits:
Object
Defined in:
lib/langchain/assistant.rb,
lib/langchain/assistant/llm/adapter.rb,
lib/langchain/assistant/messages/base.rb,
lib/langchain/assistant/llm/adapters/base.rb,
lib/langchain/assistant/llm/adapters/ollama.rb,
lib/langchain/assistant/llm/adapters/openai.rb,
lib/langchain/assistant/llm/adapters/anthropic.rb,
lib/langchain/assistant/llm/adapters/mistral_ai.rb,
lib/langchain/assistant/messages/ollama_message.rb,
lib/langchain/assistant/messages/openai_message.rb,
lib/langchain/assistant/llm/adapters/google_gemini.rb,
lib/langchain/assistant/messages/anthropic_message.rb,
lib/langchain/assistant/messages/mistral_ai_message.rb,
lib/langchain/assistant/messages/google_gemini_message.rb

Overview

Assistants are Agent-like objects that combine instructions, LLMs, tools, and knowledge to respond to user queries. An Assistant can be configured with an LLM of your choice and any vector search database, and it can easily be extended with additional tools.

Usage:

llm = Langchain::LLM::GoogleGemini.new(api_key: ENV["GOOGLE_GEMINI_API_KEY"])
assistant = Langchain::Assistant.new(
  llm: llm,
  instructions: "You're a News Reporter AI",
  tools: [Langchain::Tool::NewsRetriever.new(api_key: ENV["NEWS_API_KEY"])]
)
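
Once configured, you can drive the assistant with a prompt. A minimal sketch (the prompt below is illustrative):

assistant.add_message_and_run!(content: "What are the latest news from France?")
puts assistant.messages.last.content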

Defined Under Namespace

Modules: LLM, Messages

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(llm:, tools: [], instructions: nil, tool_choice: "auto", parallel_tool_calls: true, messages: [], add_message_callback: nil, &block) ⇒ Assistant

Create a new assistant

Parameters:

  • llm (Langchain::LLM::Base)

    LLM instance that the assistant will use

  • tools (Array<Langchain::Tool::Base>) (defaults to: [])

    Tools that the assistant has access to

  • instructions (String) (defaults to: nil)

    The system instructions

  • tool_choice (String) (defaults to: "auto")

    Specify how tools should be selected. Options: "auto", "any", "none", or the name of a specific function

  • parallel_tool_calls (Boolean) (defaults to: true)

    Whether or not to run tools in parallel

  • messages (Array<Langchain::Assistant::Messages::Base>) (defaults to: [])

    The messages

  • add_message_callback (Proc) (defaults to: nil)

    A callback function (Proc or lambda) that is called when any message is added to the conversation



# File 'lib/langchain/assistant.rb', line 38

def initialize(
  llm:,
  tools: [],
  instructions: nil,
  tool_choice: "auto",
  parallel_tool_calls: true,
  messages: [],
  add_message_callback: nil,
  &block
)
  unless tools.is_a?(Array) && tools.all? { |tool| tool.class.singleton_class.included_modules.include?(Langchain::ToolDefinition) }
    raise ArgumentError, "Tools must be an array of objects extending Langchain::ToolDefinition"
  end

  @llm = llm
  @llm_adapter = LLM::Adapter.build(llm)

  # TODO: Validate that it is, indeed, a Proc or lambda
  if !add_message_callback.nil? && !add_message_callback.respond_to?(:call)
    raise ArgumentError, "add_message_callback must be a callable object, like Proc or lambda"
  end
  @add_message_callback = add_message_callback

  self.messages = messages
  @tools = tools
  @parallel_tool_calls = parallel_tool_calls
  self.tool_choice = tool_choice
  self.instructions = instructions
  @block = block
  @state = :ready

  @total_prompt_tokens = 0
  @total_completion_tokens = 0
  @total_tokens = 0
end
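
A usage sketch for the constructor options above. The LLM class, tool, callback, and environment variable are illustrative assumptions, not requirements:

llm = Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])

assistant = Langchain::Assistant.new(
  llm: llm,
  instructions: "You are a helpful math tutor",
  tools: [Langchain::Tool::Calculator.new],
  tool_choice: "auto",
  parallel_tool_calls: true,
  # Called every time a message is added to the conversation
  add_message_callback: ->(message) { puts "[#{message.role}] message added" }
)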

Instance Attribute Details

#add_message_callback ⇒ Object

Returns the value of attribute add_message_callback.



# File 'lib/langchain/assistant.rb', line 25

def add_message_callback
  @add_message_callback
end

#instructions ⇒ Object

Returns the value of attribute instructions.



# File 'lib/langchain/assistant.rb', line 15

def instructions
  @instructions
end

#llm ⇒ Object (readonly)

Returns the value of attribute llm.



# File 'lib/langchain/assistant.rb', line 15

def llm
  @llm
end

#llm_adapter ⇒ Object (readonly)

Returns the value of attribute llm_adapter.



# File 'lib/langchain/assistant.rb', line 15

def llm_adapter
  @llm_adapter
end

#messages ⇒ Object

Returns the value of attribute messages.



# File 'lib/langchain/assistant.rb', line 15

def messages
  @messages
end

#parallel_tool_calls ⇒ Object

Returns the value of attribute parallel_tool_calls.



# File 'lib/langchain/assistant.rb', line 25

def parallel_tool_calls
  @parallel_tool_calls
end

#state ⇒ Object (readonly)

Returns the value of attribute state.



# File 'lib/langchain/assistant.rb', line 15

def state
  @state
end

#tool_choice ⇒ Object

Returns the value of attribute tool_choice.



# File 'lib/langchain/assistant.rb', line 15

def tool_choice
  @tool_choice
end

#tools ⇒ Object

Returns the value of attribute tools.



# File 'lib/langchain/assistant.rb', line 25

def tools
  @tools
end

#total_completion_tokens ⇒ Object (readonly)

Returns the value of attribute total_completion_tokens.



# File 'lib/langchain/assistant.rb', line 15

def total_completion_tokens
  @total_completion_tokens
end

#total_prompt_tokens ⇒ Object (readonly)

Returns the value of attribute total_prompt_tokens.



# File 'lib/langchain/assistant.rb', line 15

def total_prompt_tokens
  @total_prompt_tokens
end

#total_tokens ⇒ Object (readonly)

Returns the value of attribute total_tokens.



# File 'lib/langchain/assistant.rb', line 15

def total_tokens
  @total_tokens
end

Instance Method Details

#add_message(role: "user", content: nil, image_url: nil, tool_calls: [], tool_call_id: nil) ⇒ Array<Langchain::Message>

Add a message to the messages array (the role defaults to "user")

Parameters:

  • role (String) (defaults to: "user")

    The role attribute of the message. Default: "user"

  • content (String) (defaults to: nil)

    The content of the message

  • image_url (String) (defaults to: nil)

    The URL of the image to include in the message

  • tool_calls (Array<Hash>) (defaults to: [])

    The tool calls to include in the message

  • tool_call_id (String) (defaults to: nil)

    The ID of the tool call to include in the message

Returns:

  • (Array<Langchain::Message>)

    The messages



# File 'lib/langchain/assistant.rb', line 82

def add_message(role: "user", content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
  message = build_message(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)

  # Call the callback with the message
  add_message_callback.call(message) if add_message_callback # rubocop:disable Style/SafeNavigation

  # Append the message to the messages array
  messages << message

  @state = :ready

  messages
end
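
For example (the prompt text is illustrative):

assistant.add_message(content: "What is the weather in Paris?")
assistant.messages.last.content # => "What is the weather in Paris?"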

#add_message_and_run(content: nil, image_url: nil, auto_tool_execution: false) ⇒ Array<Langchain::Message>

Add a user message and run the assistant

Parameters:

  • content (String) (defaults to: nil)

    The content of the message

  • image_url (String) (defaults to: nil)

    The URL of the image to include in the message

  • auto_tool_execution (Boolean) (defaults to: false)

    Whether or not to automatically run tools

Returns:

  • (Array<Langchain::Message>)

    The messages



# File 'lib/langchain/assistant.rb', line 159

def add_message_and_run(content: nil, image_url: nil, auto_tool_execution: false)
  add_message(content: content, image_url: image_url, role: "user")
  run(auto_tool_execution: auto_tool_execution)
end
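
For example, adding a message and letting the assistant execute any resulting tool calls itself (the prompt is illustrative):

assistant.add_message_and_run(content: "What is the latest news from France?", auto_tool_execution: true)
puts assistant.messages.last.content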

#add_message_and_run!(content: nil, image_url: nil) ⇒ Array<Langchain::Message>

Add a user message and run the assistant with automatic tool execution

Parameters:

  • content (String) (defaults to: nil)

    The content of the message

  • image_url (String) (defaults to: nil)

    The URL of the image to include in the message

Returns:

  • (Array<Langchain::Message>)

    The messages



# File 'lib/langchain/assistant.rb', line 168

def add_message_and_run!(content: nil, image_url: nil)
  add_message_and_run(content: content, image_url: image_url, auto_tool_execution: true)
end

#add_messages(messages:) ⇒ Array<Langchain::Message>

Add multiple messages

Parameters:

  • messages (Array<Hash>)

    The messages to add

Returns:

  • (Array<Langchain::Message>)

    The messages



# File 'lib/langchain/assistant.rb', line 124

def add_messages(messages:)
  messages.each do |message_hash|
    add_message(**message_hash.slice(:content, :role, :tool_calls, :tool_call_id))
  end
end
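
For example, seeding a conversation with earlier turns (the content is illustrative):

assistant.add_messages(
  messages: [
    {role: "user", content: "What is 2 + 2?"},
    {role: "assistant", content: "2 + 2 equals 4."}
  ]
)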

#array_of_message_hashes ⇒ Array<Hash>

Convert messages to an LLM API-compatible array of hashes

Returns:

  • (Array<Hash>)

    Messages as an OpenAI API-compatible array of hashes



# File 'lib/langchain/assistant.rb', line 99

def array_of_message_hashes
  messages
    .map(&:to_hash)
    .compact
end
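
For example (the exact keys of each hash depend on the LLM adapter's message format):

assistant.add_message(content: "Hello!")
assistant.array_of_message_hashes
# => [{role: "user", content: "Hello!"}] (shape varies by adapter)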

#clear_messages! ⇒ Array

Delete all messages

Returns:

  • (Array)

    Empty messages array



# File 'lib/langchain/assistant.rb', line 185

def clear_messages!
  # TODO: Is this a bug? Should we keep the "system" message?
  @messages = []
end

#prompt_of_concatenated_messages ⇒ Object

Only used by the Assistant when it calls the LLM#complete() method



# File 'lib/langchain/assistant.rb', line 106

def prompt_of_concatenated_messages
  messages.map(&:to_s).join
end

#run(auto_tool_execution: false) ⇒ Array<Langchain::Message>

Run the assistant

Parameters:

  • auto_tool_execution (Boolean) (defaults to: false)

    Whether or not to automatically run tools

Returns:

  • (Array<Langchain::Message>)

    The messages



# File 'lib/langchain/assistant.rb', line 134

def run(auto_tool_execution: false)
  if messages.empty?
    Langchain.logger.warn("#{self.class} - No messages to process")
    @state = :completed
    return
  end

  @state = :in_progress
  @state = handle_state until run_finished?(auto_tool_execution)

  messages
end
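
For example, queuing a user message and running with automatic tool execution (the prompt is illustrative):

assistant.add_message(content: "What is the latest news about AI?")
assistant.run(auto_tool_execution: true)
puts assistant.messages.last.content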

#run! ⇒ Array<Langchain::Message>

Run the assistant with automatic tool execution

Returns:

  • (Array<Langchain::Message>)

    The messages



# File 'lib/langchain/assistant.rb', line 150

def run!
  run(auto_tool_execution: true)
end

#submit_tool_output(tool_call_id:, output:) ⇒ Array<Langchain::Message>

Submit tool output

Parameters:

  • tool_call_id (String)

    The ID of the tool call to submit output for

  • output (String)

    The output of the tool

Returns:

  • (Array<Langchain::Message>)

    The messages



# File 'lib/langchain/assistant.rb', line 177

def submit_tool_output(tool_call_id:, output:)
  # TODO: Validate that `tool_call_id` is valid by scanning messages and checking if this tool call ID was invoked
  add_message(role: @llm_adapter.tool_role, content: output, tool_call_id: tool_call_id)
end
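
A sketch of the manual tool-execution flow. The tool call hash shape (e.g. the "id" key) follows the OpenAI-style adapters and may differ for other LLMs:

assistant.add_message(content: "What is 40 + 2?")
assistant.run # auto_tool_execution defaults to false, so tool calls are left pending

tool_call = assistant.messages.last.tool_calls.first
assistant.submit_tool_output(tool_call_id: tool_call["id"], output: "42")
assistant.run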