Module: Tracebook::Mappers

Extended by:
Mappers
Included in:
Mappers
Defined in:
lib/tracebook/mappers.rb,
lib/tracebook/mappers/base.rb,
lib/tracebook/mappers/ollama.rb,
lib/tracebook/mappers/openai.rb,
lib/tracebook/mappers/anthropic.rb

Overview

Mappers normalize provider-specific request/response formats into TraceBook's standard NormalizedInteraction structure.

Built-in mappers exist for OpenAI, Anthropic, and Ollama. For other providers, a fallback mapper is used which preserves the raw payloads.

Examples:

Using the mapper in an adapter

normalized = Tracebook::Mappers.normalize(
  "openai",
  raw_request: { model: "gpt-4o", messages: messages },
  raw_response: openai_response,
  meta: { project: "chatbot", user: current_user, latency_ms: 200 }
)
TraceBook.record!(**normalized.to_h)

Creating a custom mapper

# See {Mappers::Base} for the base class
class Mappers::Cohere < Mappers::Base
  def self.normalize(raw_request:, raw_response:, meta: {})
    # Your normalization logic
    build_interaction(provider: "cohere", model: ..., ...)
  end
end

See Also:

  • Mappers::Base — base class to subclass when writing a custom mapper
Defined Under Namespace

Classes: Anthropic, Base, Ollama, OpenAI

Instance Method Summary collapse

Instance Method Details

#anthropic_usage(response, key) ⇒ Object (private)



260
261
262
263
# File 'lib/tracebook/mappers.rb', line 260

# Reads a token count from an Anthropic response's usage object.
#
# @param response [Hash] symbolized Anthropic response
# @param key [Symbol] usage field (:input_tokens or :output_tokens)
# @return [Integer, nil] the count, or nil when the field is absent
def anthropic_usage(response, key)
  counts = (response[:usage] || {}).with_indifferent_access
  counts[key]&.to_i
end

#extract_anthropic_messages(messages) ⇒ Object (private)

Anthropic helpers



239
240
241
242
243
244
# File 'lib/tracebook/mappers.rb', line 239

# Flattens Anthropic chat messages into a single prompt string:
# each message's content blocks are reduced to their text via
# #extract_blocks, and the pieces are joined with blank lines.
def extract_anthropic_messages(messages)
  texts = Array(messages).flat_map do |entry|
    entry = entry.with_indifferent_access if entry.respond_to?(:with_indifferent_access)
    extract_blocks(entry[:content])
  end
  texts.join("\n\n")
end

#extract_blocks(blocks) ⇒ Object (private)



246
247
248
249
250
251
252
253
254
255
256
257
258
# File 'lib/tracebook/mappers.rb', line 246

# Collects the text out of Anthropic-style content blocks.
# Only "text" and "input_text" blocks carry text; any other block
# type (tool_use, image, ...) is dropped from the result.
#
# @param blocks [Array<Hash>, nil] content blocks (nil is treated as empty)
# @return [Array<String>] the extracted text fragments
def extract_blocks(blocks)
  Array(blocks).filter_map do |entry|
    entry = entry.with_indifferent_access if entry.respond_to?(:with_indifferent_access)
    entry[:text] if %w[text input_text].include?(entry[:type])
  end
end

#fallback_normalized(provider, raw_request, raw_response, meta) ⇒ Object (private)



175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
# File 'lib/tracebook/mappers.rb', line 175

# Generic mapper for providers without a dedicated normalizer.
# Raw payloads are preserved verbatim; text and token fields are
# taken from meta only when the caller supplies them.
def fallback_normalized(provider, raw_request, raw_response, meta)
  req = symbolize(raw_request || {})
  res = symbolize(raw_response || {})
  info = indifferent_meta(meta)

  attributes = {
    provider: provider.to_s,
    model: req[:model] || res[:model],
    project: info[:project],
    request_payload: raw_request,
    response_payload: raw_response,
    request_text: info[:request_text],
    response_text: info[:response_text],
    input_tokens: info[:input_tokens],
    output_tokens: info[:output_tokens],
    latency_ms: info[:latency_ms],
    status: info[:status]&.to_sym || :success,
    error_class: info[:error_class],
    error_message: info[:error_message],
    tags: Array(info[:tags]).compact,
    metadata: info[:metadata] || {},
    user: info[:user],
    parent_id: info[:parent_id],
    session_id: info[:session_id]
  }
  Tracebook::NormalizedInteraction.new(**attributes)
end

#indifferent_meta(meta) ⇒ Object (private)

Common helpers



273
274
275
# File 'lib/tracebook/mappers.rb', line 273

# Wraps caller-supplied meta so both string and symbol keys resolve;
# nil becomes an empty hash.
def indifferent_meta(meta)
  source = meta || {}
  source.with_indifferent_access
end

#join_messages(messages) ⇒ Object (private)

OpenAI helpers



203
204
205
# File 'lib/tracebook/mappers.rb', line 203

# Concatenates OpenAI chat messages' :content fields into one prompt
# string, skipping messages whose content is empty.
def join_messages(messages)
  contents = Array(messages).map do |entry|
    entry.with_indifferent_access[:content].to_s
  end
  contents.reject(&:empty?).join("\n\n")
end

#normalize(provider, raw_request:, raw_response:, meta: {}) ⇒ NormalizedInteraction

Normalizes a provider's request/response into standard format.

Routes to provider-specific mappers for OpenAI, Anthropic, and Ollama. Falls back to a generic mapper for unknown providers.

Examples:

Normalizing an OpenAI response

normalized = Tracebook::Mappers.normalize(
  "openai",
  raw_request: {
    model: "gpt-4o",
    messages: [{ role: "user", content: "Hello" }]
  },
  raw_response: {
    choices: [{ message: { content: "Hi!" } }],
    usage: { prompt_tokens: 10, completion_tokens: 5 }
  },
  meta: { latency_ms: 150, user: current_user }
)

Parameters:

  • provider (String)

    Provider name ("openai", "anthropic", "ollama", etc.)

  • raw_request (Hash)

    The original request sent to the provider

  • raw_response (Hash)

    The original response from the provider

  • meta (Hash) (defaults to: {})

    Additional metadata (project, user, session_id, tags, etc.)

Options Hash (meta:):

  • :project (String)

    Project name for filtering

  • :user (ActiveRecord::Base)

    Associated user

  • :session_id (String)

    Session identifier

  • :parent_id (Integer)

    Parent interaction ID

  • :tags (Array<String>)

    Labels for filtering

  • :latency_ms (Integer)

    Request duration in milliseconds

  • :status (Symbol)

    :success, :error, or :canceled

  • :error_class (String)

    Exception class name (for errors)

  • :error_message (String)

    Exception message (for errors)

Returns:

  • (NormalizedInteraction) — the provider's request/response normalized into TraceBook's standard structure


75
76
77
78
79
80
81
82
83
84
85
86
# File 'lib/tracebook/mappers.rb', line 75

# Routes to the provider-specific mapper; unknown providers fall back
# to the generic mapper, which preserves the raw payloads untouched.
#
# @param provider [String, Symbol] provider name ("openai", "anthropic", "ollama", ...)
# @param raw_request [Hash] the original request sent to the provider
# @param raw_response [Hash] the original response from the provider
# @param meta [Hash] extra metadata (project, user, session_id, tags, latency_ms, ...)
# @return [NormalizedInteraction]
def normalize(provider, raw_request:, raw_response:, meta: {})
  mapper =
    case provider.to_s
    when "openai" then :normalize_openai
    when "anthropic" then :normalize_anthropic
    when "ollama" then :normalize_ollama
    end
  return fallback_normalized(provider, raw_request, raw_response, meta) unless mapper

  send(mapper, raw_request, raw_response, meta)
end

#normalize_anthropic(raw_request, raw_response, meta) ⇒ Object (private)



118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
# File 'lib/tracebook/mappers.rb', line 118

# Maps an Anthropic Messages API request/response onto the standard
# NormalizedInteraction: content blocks are flattened to plain text
# and token counts are read from the response's usage object.
def normalize_anthropic(raw_request, raw_response, meta)
  req = symbolize(raw_request || {})
  res = symbolize(raw_response || {})
  info = indifferent_meta(meta)

  Tracebook::NormalizedInteraction.new(
    provider: "anthropic",
    model: req[:model] || res[:model],
    project: info[:project],
    request_payload: raw_request,
    response_payload: raw_response,
    request_text: extract_anthropic_messages(req[:messages]),
    response_text: extract_blocks(res[:content]).join("\n\n"),
    input_tokens: anthropic_usage(res, :input_tokens),
    output_tokens: anthropic_usage(res, :output_tokens),
    latency_ms: info[:latency_ms],
    status: info[:status]&.to_sym || :success,
    error_class: nil,
    error_message: nil,
    tags: Array(info[:tags]).compact,
    metadata: {},
    user: info[:user],
    parent_id: info[:parent_id],
    session_id: info[:session_id]
  )
end

#normalize_ollama(raw_request, raw_response, meta) ⇒ Object (private)



145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
# File 'lib/tracebook/mappers.rb', line 145

# Maps an Ollama generate-style request/response onto the standard
# NormalizedInteraction. Latency falls back to the response's
# total_duration when the caller did not measure it.
#
# Fix: the local variable identifier was dropped from the source
# (" = {}" / "metadata: ,") leaving invalid Ruby; restored as
# `metadata`, matching the keyword it is passed to.
def normalize_ollama(raw_request, raw_response, meta)
  request = symbolize(raw_request || {})
  response = symbolize(raw_response || {})
  meta_info = indifferent_meta(meta)

  # Provider-specific extras preserved alongside the normalized fields.
  metadata = {}
  metadata["eval_count"] = response[:eval_count] if response.key?(:eval_count)

  Tracebook::NormalizedInteraction.new(
    provider: "ollama",
    model: request[:model] || response[:model],
    project: meta_info[:project],
    request_payload: raw_request,
    response_payload: raw_response,
    request_text: request[:prompt] || request[:input],
    response_text: response[:response],
    input_tokens: response[:prompt_eval_count],
    output_tokens: response[:eval_count],
    latency_ms: meta_info[:latency_ms] || to_milliseconds(response[:total_duration]),
    status: meta_info[:status]&.to_sym || :success,
    error_class: nil,
    error_message: nil,
    tags: Array(meta_info[:tags]).compact,
    metadata: metadata,
    user: meta_info[:user],
    parent_id: meta_info[:parent_id],
    session_id: meta_info[:session_id]
  )
end

#normalize_openai(raw_request, raw_response, meta) ⇒ Object (private)



90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
# File 'lib/tracebook/mappers.rb', line 90

# Maps an OpenAI Chat Completions request/response onto the standard
# NormalizedInteraction. Status defaults to what the first choice's
# finish_reason implies unless the caller overrides it via meta.
#
# Fix: the local variable and helper-call identifiers were dropped
# from the source (" = (response)" / "metadata: ,"); restored as
# `metadata = openai_metadata(response)` per the documented helper.
def normalize_openai(raw_request, raw_response, meta)
  request = symbolize(raw_request || {})
  response = symbolize(raw_response || {})
  metadata = openai_metadata(response)
  meta_info = indifferent_meta(meta)

  Tracebook::NormalizedInteraction.new(
    provider: "openai",
    model: request[:model] || response[:model],
    project: meta_info[:project],
    request_payload: raw_request,
    response_payload: raw_response,
    request_text: join_messages(request[:messages]),
    response_text: openai_response_text(response),
    input_tokens: openai_usage_tokens(response, :prompt_tokens),
    output_tokens: openai_usage_tokens(response, :completion_tokens),
    latency_ms: meta_info[:latency_ms],
    status: meta_info[:status]&.to_sym || openai_status(response),
    error_class: nil,
    error_message: nil,
    tags: Array(meta_info[:tags]).compact,
    metadata: metadata,
    user: meta_info[:user],
    parent_id: meta_info[:parent_id],
    session_id: meta_info[:session_id]
  )
end

#openai_first_choice(response) ⇒ Object (private)



207
208
209
210
# File 'lib/tracebook/mappers.rb', line 207

# Returns the first entry of response[:choices] with indifferent
# access, or an empty hash when there are no choices.
def openai_first_choice(response)
  first = Array(response[:choices]).first
  return {} if first.nil?

  first.with_indifferent_access
end

#openai_metadata(response) ⇒ Object (private)



223
224
225
226
227
228
# File 'lib/tracebook/mappers.rb', line 223

# Builds the provider-specific metadata hash for an OpenAI
# interaction, currently just the first choice's finish_reason
# when present.
#
# Fix: the method name was dropped from the source ("def (response)");
# restored as `openai_metadata`, matching the summary heading and the
# call site in normalize_openai.
def openai_metadata(response)
  choice = openai_first_choice(response)
  metadata = {}
  metadata["finish_reason"] = choice[:finish_reason] if choice[:finish_reason]
  metadata
end

#openai_response_text(response) ⇒ Object (private)



212
213
214
215
216
# File 'lib/tracebook/mappers.rb', line 212

# Extracts the assistant text from the first choice's message;
# returns "" when there is no message or content.
def openai_response_text(response)
  message = openai_first_choice(response)[:message] || {}
  message.with_indifferent_access[:content].to_s
end

#openai_status(response) ⇒ Object (private)



230
231
232
233
234
235
236
# File 'lib/tracebook/mappers.rb', line 230

# Derives an interaction status from the first choice's finish_reason:
# "length" -> :canceled (output was truncated), "error" -> :error,
# anything else -> :success.
def openai_status(response)
  case openai_first_choice(response)[:finish_reason]
  when "length" then :canceled
  when "error" then :error
  else :success
  end
end

#openai_usage_tokens(response, key) ⇒ Object (private)



218
219
220
221
# File 'lib/tracebook/mappers.rb', line 218

# Reads a token count from an OpenAI usage payload.
#
# @param response [Hash] symbolized OpenAI response
# @param key [Symbol] usage field (:prompt_tokens or :completion_tokens)
# @return [Integer, nil] the count, or nil when the field is absent
def openai_usage_tokens(response, key)
  counts = (response[:usage] || {}).with_indifferent_access
  counts[key]&.to_i
end

#symbolize(hash) ⇒ Object (private)



277
278
279
# File 'lib/tracebook/mappers.rb', line 277

# Returns a deep copy of hash with top-level keys converted to
# symbols where possible.
# NOTE(review): transform_keys is shallow — nested hashes keep their
# original keys; confirm that only first-level symbolization is intended.
def symbolize(hash)
  copy = hash.deep_dup
  copy.transform_keys { |key| key.respond_to?(:to_sym) ? key.to_sym : key }
end

#to_milliseconds(value) ⇒ Object (private)

Ollama helpers



266
267
268
269
270
# File 'lib/tracebook/mappers.rb', line 266

# Converts an Ollama duration to whole milliseconds.
#
# Ollama reports durations (total_duration, eval_duration, ...) in
# nanoseconds, so nanoseconds -> milliseconds divides by 1_000_000.
# The previous `* 1000` treated the value as seconds and inflated
# latencies by a factor of 10^9.
#
# @param value [Numeric, nil] duration in nanoseconds
# @return [Integer, nil] duration in milliseconds, or nil when value is nil
def to_milliseconds(value)
  return unless value

  (value.to_f / 1_000_000).to_i
end