Class: Anthropic::Resources::Beta::Messages

Inherits:
Object
Defined in:
lib/anthropic/resources/beta/messages.rb,
lib/anthropic/resources/beta/messages/batches.rb

Defined Under Namespace

Classes: Batches

Instance Attribute Summary

Instance Method Summary

Instance Attribute Details

#batches ⇒ Anthropic::Resources::Beta::Messages::Batches (readonly)



# File 'lib/anthropic/resources/beta/messages.rb', line 8

def batches
  @batches
end
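
A minimal sketch of reaching the sub-resource. It assumes ANTHROPIC_API_KEY is set in the environment and that the list page supports the SDK's auto_paging_each pagination helper:

require "anthropic"

anthropic = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

# The batches reader exposes the Message Batches sub-resource.
anthropic.beta.messages.batches.list.auto_paging_each do |batch|
  puts batch.id
end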

Instance Method Details

#count_tokens(messages:, model:, context_management: nil, mcp_servers: nil, output_config: nil, output_format: nil, speed: nil, system_: nil, thinking: nil, tool_choice: nil, tools: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessageTokensCount

Some parameter documentation has been truncated; see Models::Beta::MessageCountTokensParams for more details.

Count the number of tokens in a Message.

The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.

Learn more about token counting in our user guide
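
A minimal usage sketch, reusing the anthropic client constructed above (the model id and message content are illustrative):

count = anthropic.beta.messages.count_tokens(
  model: "claude-sonnet-4-5",                          # illustrative model id
  messages: [{role: "user", content: "Hello, Claude"}]
)
puts count.input_tokens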

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 341

def count_tokens(params)
  parsed, options = Anthropic::Beta::MessageCountTokensParams.dump_request(params)
  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil, is_beta: true)

  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages/count_tokens?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessageTokensCount,
    options: options
  )
end

#create(max_tokens:, messages:, model:, container: nil, context_management: nil, inference_geo: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, speed: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Models::Beta::BetaMessage

See #stream_raw for streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our user guide
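
A minimal non-streaming sketch, reusing the anthropic client constructed above (the model id and prompt are illustrative, and the first content block is assumed to be a text block):

message = anthropic.beta.messages.create(
  max_tokens: 1024,
  model: "claude-sonnet-4-5",                          # illustrative model id
  messages: [{role: "user", content: "Hello, Claude"}]
)
puts message.content.first.text                        # assumes a text block comes first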

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 84

def create(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  if parsed[:stream]
    message = "Please use `#stream` for the streaming use case."
    raise ArgumentError.new(message)
  end

  warn_thinking_enabled(parsed)

  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(
    parsed,
    strict: nil,
    is_beta: true
  )

  unwrap = ->(raw) { Anthropic::Helpers::Messages.parse_input_schemas!(raw, tools:, models:) }

  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    model = parsed[:model]&.to_sym
    max_tokens = parsed[:max_tokens].to_i
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    options = {timeout: 600, **options}
  end

  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessage,
    unwrap: unwrap,
    options: options
  )
end

#stream(max_tokens:, messages:, model:, container: nil, context_management: nil, inference_geo: nil, mcp_servers: nil, metadata: nil, output_config: nil, output_format: nil, service_tier: nil, speed: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Streaming::MessageStream

See #create for non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our user guide
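
A minimal streaming sketch, reusing the anthropic client constructed above; it assumes the returned MessageStream exposes a text-delta enumerator via #text:

stream = anthropic.beta.messages.stream(
  max_tokens: 1024,
  model: "claude-sonnet-4-5",                          # illustrative model id
  messages: [{role: "user", content: "Tell me a short story"}]
)
stream.text.each { |delta| print(delta) }              # print text deltas as they arrive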

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 188

def stream(params)
  parsed, options = Anthropic::Models::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)

  warn_thinking_enabled(parsed)

  tools, models = Anthropic::Helpers::Messages.distill_input_schema_models!(
    parsed,
    strict: nil,
    is_beta: true
  )

  header_params = {betas: "anthropic-beta"}
  raw_stream = @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: stream_headers(
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    ).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
  Anthropic::Streaming::MessageStream.new(raw_stream:, tools:, models:)
end

#stream_raw(max_tokens:, messages:, model:, container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {}) ⇒ Anthropic::Internal::Stream<Anthropic::Models::Beta::BetaRawMessageStartEvent, Anthropic::Models::Beta::BetaRawMessageDeltaEvent, Anthropic::Models::Beta::BetaRawMessageStopEvent, Anthropic::Models::Beta::BetaRawContentBlockStartEvent, Anthropic::Models::Beta::BetaRawContentBlockDeltaEvent, Anthropic::Models::Beta::BetaRawContentBlockStopEvent>

See #create for non-streaming counterpart.

Some parameter documentation has been truncated; see Models::Beta::MessageCreateParams for more details.

Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.

The Messages API can be used for either single queries or stateless multi-turn conversations.

Learn more about the Messages API in our user guide
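
A minimal raw-event sketch, reusing the anthropic client constructed above; only content_block_delta events carrying text are printed, and other event types are skipped:

raw_stream = anthropic.beta.messages.stream_raw(
  max_tokens: 1024,
  model: "claude-sonnet-4-5",                          # illustrative model id
  messages: [{role: "user", content: "Hello, Claude"}]
)
raw_stream.each do |event|
  next unless event.type == :content_block_delta       # skip start/stop and message-level events
  print(event.delta.text) if event.delta.respond_to?(:text)
end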

Parameters:

Returns:

See Also:



# File 'lib/anthropic/resources/beta/messages.rb', line 272

def stream_raw(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)
  unless parsed.fetch(:stream, true)
    message = "Please use `#create` for the non-streaming use case."
    raise ArgumentError.new(message)
  end
  parsed.store(:stream, true)

  warn_thinking_enabled(parsed)

  Anthropic::Helpers::Messages.distill_input_schema_models!(parsed, strict: nil, is_beta: true)

  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: stream_headers(
      "accept" => "text/event-stream",
      **parsed.slice(*header_params.keys)
    ).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    stream: Anthropic::Internal::Stream,
    model: Anthropic::Beta::BetaRawMessageStreamEvent,
    options: {timeout: 600, **options}
  )
end

#tool_runner(params) ⇒ Anthropic::Helpers::Tools::Runner



# File 'lib/anthropic/resources/beta/messages.rb', line 13

def tool_runner(params)
  params = params.to_h
  warn_thinking_enabled(params)
  max_iterations = params.delete(:max_iterations)
  compaction_control = params.delete(:compaction_control)
  Anthropic::Helpers::Tools::Runner.new(@client, params:, max_iterations:, compaction_control:)
end
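
A construction-only sketch, reusing the anthropic client constructed above. The tool definition and model id are hypothetical, and max_iterations is one of the keys this method extracts before building the runner; see Anthropic::Helpers::Tools::Runner for how to drive the tool-use loop.

weather_tool = {                                       # hypothetical tool definition
  name: "get_weather",
  description: "Get the current weather for a city",
  input_schema: {type: "object", properties: {city: {type: "string"}}, required: ["city"]}
}

runner = anthropic.beta.messages.tool_runner(
  max_tokens: 1024,
  model: "claude-sonnet-4-5",                          # illustrative model id
  messages: [{role: "user", content: "What's the weather in Paris?"}],
  tools: [weather_tool],
  max_iterations: 5                                    # extracted here and passed to the Runner
)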