Class: LLM::Anthropic

Inherits:
Provider show all
Defined in:
lib/llm/providers/anthropic.rb,
lib/llm/providers/anthropic/files.rb,
lib/llm/providers/anthropic/models.rb,
lib/llm/providers/anthropic/error_handler.rb,
lib/llm/providers/anthropic/stream_parser.rb,
lib/llm/providers/anthropic/request_adapter.rb,
lib/llm/providers/anthropic/response_adapter.rb

Overview

The Anthropic class implements a provider for Anthropic.

Examples:

#!/usr/bin/env ruby
require "llm"

llm = LLM.anthropic(key: ENV["KEY"])
bot = LLM::Bot.new(llm)
bot.chat ["Tell me about this photo", File.open("/images/dog.jpg", "rb")]
bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }

Defined Under Namespace

Classes: Files, Models

Constant Summary collapse

HOST =
"api.anthropic.com"

Instance Method Summary collapse

Methods inherited from Provider

#audio, #chat, clients, #developer_role, #embed, #images, #inspect, #moderations, #respond, #responses, #schema, #server_tool, #system_role, #user_role, #vector_stores, #with

Constructor Details

#initialize ⇒ Anthropic



29
30
31
# File 'lib/llm/providers/anthropic.rb', line 29

# Builds an Anthropic provider pinned to the api.anthropic.com host.
# Every keyword option is forwarded unchanged to the Provider superclass;
# a caller-supplied :host would override HOST since it splats last.
def initialize(**options)
  super(host: HOST, **options)
end

Instance Method Details

#assistant_role ⇒ String



69
70
71
# File 'lib/llm/providers/anthropic.rb', line 69

# The role string Anthropic uses for model-authored messages.
# @return [String] always "assistant"
def assistant_role = "assistant"

#complete(prompt, params = {}) ⇒ LLM::Response

Provides an interface to the chat completions API

Examples:

llm = LLM.openai(key: ENV["KEY"])
messages = [{role: "system", content: "Your task is to answer all of my questions"}]
res = llm.complete("5 + 2 ?", messages:)
print "[#{res.choices[0].role}]", res.choices[0].content, "\n"

Raises:

See Also:



43
44
45
46
47
48
49
# File 'lib/llm/providers/anthropic.rb', line 43

# Runs a chat completion: normalizes the caller's params, builds the HTTP
# request, executes it (optionally streaming), and adapts the raw response.
# The captured tools are exposed on the response via a singleton __tools__
# reader so downstream code can replay tool definitions.
def complete(prompt, params = {})
  params, stream, tools, role = normalize_complete_params(params)
  request = build_complete_request(prompt, params, role)
  raw = execute(request: request, stream: stream)
  adapted = ResponseAdapter.adapt(raw, type: :completion)
  tools_reader = Module.new { define_method(:__tools__) { tools } }
  # Object#extend returns the receiver, so the adapted response is the result.
  adapted.extend(tools_reader)
end

#default_model ⇒ String

Returns the default model for chat completions



77
78
79
# File 'lib/llm/providers/anthropic.rb', line 77

# The model identifier used for chat completions when none is given.
# @return [String]
def default_model = "claude-sonnet-4-20250514"

#files ⇒ LLM::Anthropic::Files

Provides an interface to Anthropic's files API

See Also:



63
64
65
# File 'lib/llm/providers/anthropic.rb', line 63

# Entry point for Anthropic's files API; a fresh client is built per call.
# @return [LLM::Anthropic::Files]
def files = LLM::Anthropic::Files.new(self)

#models ⇒ LLM::Anthropic::Models

Provides an interface to Anthropic's models API



55
56
57
# File 'lib/llm/providers/anthropic.rb', line 55

# Entry point for Anthropic's models API; a fresh client is built per call.
# @return [LLM::Anthropic::Models]
def models = LLM::Anthropic::Models.new(self)

#server_tools ⇒ Hash{Symbol => LLM::ServerTool}

Note:

This method includes certain tools that require configuration through a set of options that are easier to set through the LLM::Provider#server_tool method.

See Also:



88
89
90
91
92
93
94
# File 'lib/llm/providers/anthropic.rb', line 88

# The built-in Anthropic server tools, keyed by a short symbolic name.
# Versioned type strings pin each tool to a specific Anthropic release;
# insertion order (bash, web_search, text_editor) is preserved.
# @return [Hash{Symbol => LLM::ServerTool}]
def server_tools
  registry = {}
  registry[:bash] = server_tool(:bash, type: "bash_20250124")
  registry[:web_search] = server_tool(:web_search, type: "web_search_20250305", max_uses: 5)
  registry[:text_editor] = server_tool(:str_replace_based_edit_tool, type: "text_editor_20250728", max_characters: 10_000)
  registry
end

#web_search(query:) ⇒ LLM::Response

A convenience method for performing a web search using the Anthropic web search tool.

Examples:

llm = LLM.anthropic(key: ENV["KEY"])
res = llm.web_search(query: "summarize today's news")
res.search_results.each { |item| print item.title, ": ", item.url, "\n" }


105
106
107
# File 'lib/llm/providers/anthropic.rb', line 105

# Convenience wrapper: runs a completion with only the web_search server
# tool enabled and re-adapts the response as a web-search result.
# @return [LLM::Response]
def web_search(query:)
  completion = complete(query, tools: [server_tools[:web_search]])
  ResponseAdapter.adapt(completion, type: :web_search)
end