Module: RubyLLM::Providers::Anthropic::Chat
- Included in:
- RubyLLM::Providers::Anthropic
- Defined in:
- lib/ruby_llm/providers/anthropic/chat.rb
Overview
Chat methods of the Anthropic API integration
Class Method Summary
- .add_optional_fields(payload, system_content:, tools:, temperature:) ⇒ Object
- .build_base_payload(chat_messages, model, stream) ⇒ Object
- .build_message(data, content, tool_use_blocks, response) ⇒ Object
- .build_system_content(system_messages) ⇒ Object
- .completion_url ⇒ Object
- .convert_role(role) ⇒ Object
- .extract_text_content(blocks) ⇒ Object
- .format_basic_message(msg) ⇒ Object
- .format_message(msg) ⇒ Object
- .parse_completion_response(response) ⇒ Object
- .render_payload(messages, tools:, temperature:, model:, stream: false, schema: nil) ⇒ Object rubocop:disable Metrics/ParameterLists,Lint/UnusedMethodArgument.
- .separate_messages(messages) ⇒ Object
Class Method Details
.add_optional_fields(payload, system_content:, tools:, temperature:) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 57

def add_optional_fields(payload, system_content:, tools:, temperature:)
  payload[:tools] = tools.values.map { |t| Tools.function_for(t) } if tools.any?
  payload[:system] = system_content unless system_content.empty?
  payload[:temperature] = temperature unless temperature.nil?
end
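A minimal sketch of the mutation this performs, assuming the helper is reachable (it is an internal class method, so the direct call is illustrative) and using made-up values:

payload = { model: 'claude-3-5-sonnet-latest', messages: [], stream: false, max_tokens: 4096 }

add_optional_fields(payload,
                    system_content: 'Answer concisely.', # anything responding to empty?
                    tools: {},                           # empty, so no :tools key is added
                    temperature: 0.2)

payload[:system]      # => "Answer concisely."
payload[:temperature] # => 0.2
payload.key?(:tools)  # => false, because tools.any? was false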
.build_base_payload(chat_messages, model, stream) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 48

def build_base_payload(chat_messages, model, stream)
  {
    model: model.id,
    messages: chat_messages.map { |msg| format_message(msg) },
    stream: stream,
    max_tokens: model.max_tokens || 4096
  }
end
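The returned hash mirrors the Anthropic Messages API body. A hedged sketch, assuming a model record that responds to id and max_tokens, and chat messages already split off by separate_messages:

model = Struct.new(:id, :max_tokens).new('claude-3-5-haiku-latest', nil) # stand-in for a RubyLLM model record

build_base_payload(chat_messages, model, false)
# => { model: 'claude-3-5-haiku-latest',
#      messages: [...],   # each entry comes from format_message
#      stream: false,
#      max_tokens: 4096 } # falls back to 4096 because model.max_tokens is nil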
.build_message(data, content, tool_use_blocks, response) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 78

def build_message(data, content, tool_use_blocks, response)
  usage = data['usage'] || {}
  cached_tokens = usage['cache_read_input_tokens']
  cache_creation_tokens = usage['cache_creation_input_tokens']
  if cache_creation_tokens.nil? && usage['cache_creation'].is_a?(Hash)
    cache_creation_tokens = usage['cache_creation'].values.compact.sum
  end

  Message.new(
    role: :assistant,
    content: content,
    tool_calls: Tools.parse_tool_calls(tool_use_blocks),
    input_tokens: usage['input_tokens'],
    output_tokens: usage['output_tokens'],
    cached_tokens: cached_tokens,
    cache_creation_tokens: cache_creation_tokens,
    model_id: data['model'],
    raw: response
  )
end
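The cache accounting has a fallback worth noting: when the response omits cache_creation_input_tokens but reports a cache_creation hash of per-TTL counts, those counts are summed. A self-contained illustration with made-up usage data:

usage = {
  'cache_read_input_tokens' => 12,
  'cache_creation' => { 'ephemeral_5m_input_tokens' => 100, 'ephemeral_1h_input_tokens' => 50 }
}

cache_creation_tokens = usage['cache_creation_input_tokens']
if cache_creation_tokens.nil? && usage['cache_creation'].is_a?(Hash)
  cache_creation_tokens = usage['cache_creation'].values.compact.sum
end

cache_creation_tokens # => 150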
.build_system_content(system_messages) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 27

def build_system_content(system_messages)
  return [] if system_messages.empty?

  if system_messages.length > 1
    RubyLLM.logger.warn(
      "Anthropic's Claude implementation only supports a single system message. " \
      'Multiple system messages will be combined into one.'
    )
  end

  system_messages.flat_map do |msg|
    content = msg.content
    if content.is_a?(RubyLLM::Content::Raw)
      content.value
    else
      Media.format_content(content)
    end
  end
end
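An illustrative sketch of the multi-message case, using stand-in objects that only need to respond to content (the call itself is internal, so this shows the behavior rather than public API):

system_messages = [
  Struct.new(:content).new('Answer in French.'),
  Struct.new(:content).new('Be brief.')
]

build_system_content(system_messages)
# Logs the warning above, then flattens both prompts into one array of
# content blocks that ends up in the single :system field of the payload.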
.completion_url ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 10

def completion_url
  '/v1/messages'
end
.convert_role(role) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 116

def convert_role(role)
  case role
  when :tool, :user then 'user'
  else 'assistant'
  end
end
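The Anthropic Messages API only has user and assistant roles, so tool results travel back as user turns and everything else falls through to assistant. Illustrative calls:

convert_role(:user)      # => "user"
convert_role(:tool)      # => "user"      (tool results are sent as user turns)
convert_role(:assistant) # => "assistant"
convert_role(:system)    # => "assistant" (system messages are normally separated out before this runs)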
.extract_text_content(blocks) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 73

def extract_text_content(blocks)
  text_blocks = blocks.select { |c| c['type'] == 'text' }
  text_blocks.map { |c| c['text'] }.join
end
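A small illustration with a hand-written content array: only text blocks contribute, and their text is concatenated in order:

blocks = [
  { 'type' => 'text', 'text' => 'The weather is ' },
  { 'type' => 'tool_use', 'id' => 'toolu_01', 'name' => 'get_weather', 'input' => { 'city' => 'Paris' } },
  { 'type' => 'text', 'text' => 'sunny.' }
]

extract_text_content(blocks) # => "The weather is sunny."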
.format_basic_message(msg) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 109

def format_basic_message(msg)
  {
    role: convert_role(msg.role),
    content: Media.format_content(msg.content)
  }
end
.format_message(msg) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 99

def format_message(msg)
  if msg.tool_call?
    Tools.format_tool_call(msg)
  elsif msg.tool_result?
    Tools.format_tool_result(msg)
  else
    format_basic_message(msg)
  end
end
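A dispatch summary with hypothetical message objects (anything responding to tool_call? and tool_result?, as the incoming messages do):

format_message(assistant_tool_call) # => Tools.format_tool_call(...)   assistant turn carrying tool_use blocks
format_message(tool_result)         # => Tools.format_tool_result(...) tool output sent back as a user turn
format_message(plain_message)       # => format_basic_message(...)     ordinary user/assistant text or media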
.parse_completion_response(response) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 63

def parse_completion_response(response)
  data = response.body
  content_blocks = data['content'] || []

  text_content = extract_text_content(content_blocks)
  tool_use_blocks = Tools.find_tool_uses(content_blocks)

  build_message(data, text_content, tool_use_blocks, response)
end
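A hedged end-to-end sketch, assuming a Faraday-style response object whose body is the already-parsed JSON hash; the accessors on the result are the ones set in build_message above:

FakeResponse = Struct.new(:body) # stand-in for the HTTP response

response = FakeResponse.new({
  'model'   => 'claude-3-5-sonnet-latest',
  'content' => [{ 'type' => 'text', 'text' => 'Hi there!' }],
  'usage'   => { 'input_tokens' => 10, 'output_tokens' => 4 }
})

message = parse_completion_response(response)
message.content       # => the extracted text, "Hi there!"
message.input_tokens  # => 10
message.output_tokens # => 4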
.render_payload(messages, tools:, temperature:, model:, stream: false, schema: nil) ⇒ Object
rubocop:disable Metrics/ParameterLists,Lint/UnusedMethodArgument
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 14

def render_payload(messages, tools:, temperature:, model:, stream: false, schema: nil) # rubocop:disable Metrics/ParameterLists,Lint/UnusedMethodArgument
  system_messages, chat_messages = separate_messages(messages)
  system_content = build_system_content(system_messages)

  build_base_payload(chat_messages, model, stream).tap do |payload|
    add_optional_fields(payload, system_content:, tools:, temperature:)
  end
end
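Putting the pieces together: a hedged sketch of a call and the payload shape it yields (model and message objects are stand-ins; schema is accepted for interface parity but not used by this method, which is why Lint/UnusedMethodArgument is disabled):

payload = render_payload(messages,
                         tools: {},
                         temperature: 0.5,
                         model: model,
                         stream: false,
                         schema: nil)

# Illustrative shape:
# { model: 'claude-3-5-sonnet-latest',
#   messages: [{ role: 'user', content: [...] }],
#   stream: false,
#   max_tokens: 8192,   # model.max_tokens, or 4096 if nil
#   system: [...],      # present only when system messages exist
#   temperature: 0.5 }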
.separate_messages(messages) ⇒ Object
# File 'lib/ruby_llm/providers/anthropic/chat.rb', line 23

def separate_messages(messages)
  messages.partition { |msg| msg.role == :system }
end
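Array#partition returns two arrays, so system prompts come back first and everything else second. A self-contained illustration using stand-in structs that only need a role:

Msg = Struct.new(:role, :content)

messages = [
  Msg.new(:system, 'Be brief.'),
  Msg.new(:user, 'Hi'),
  Msg.new(:assistant, 'Hello!')
]

system_messages, chat_messages = separate_messages(messages)
system_messages.map(&:role) # => [:system]
chat_messages.map(&:role)   # => [:user, :assistant]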