Class: LogStash::Outputs::OpenSearch::HttpClient

Inherits: Object
Defined in:
lib/logstash/outputs/opensearch/http_client.rb,
lib/logstash/outputs/opensearch/http_client/pool.rb,
lib/logstash/outputs/opensearch/http_client/manticore_adapter.rb

Defined Under Namespace

Classes: AWSIAMCredential, ManticoreAdapter, Pool

Constant Summary

AWS_DEFAULT_PORT = 443
AWS_DEFAULT_PROFILE = 'default'
AWS_DEFAULT_PROFILE_CREDENTIAL_RETRY = 0
AWS_DEFAULT_PROFILE_CREDENTIAL_TIMEOUT = 1
AWS_DEFAULT_REGION = 'us-east-1'
AWS_IAM_AUTH_TYPE = "aws_iam"
AWS_SERVICE = 'es'
BASIC_AUTH_TYPE = 'basic'
DEFAULT_HEADERS = { "content-type" => "application/json" }

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(options = {}) ⇒ HttpClient

The `client_settings` key is a hash that can contain other settings:

  • `:ssl` - Boolean. Enable or disable SSL/TLS.

  • `:proxy` - String. The HTTP proxy to use.

  • `:path` - String. The leading path for prefixing OpenSearch requests. This is sometimes used if you are proxying OpenSearch access through a special http path, such as using mod_rewrite.

  • `:headers` - Hash. Pairs of headers and their values.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 46

def initialize(options={})
  @logger = options[:logger]
  @metric = options[:metric]
  @bulk_request_metrics = @metric.namespace(:bulk_requests)
  @bulk_response_metrics = @bulk_request_metrics.namespace(:responses)

  # Again, in case we use DEFAULT_OPTIONS in the future, uncomment this.
  # @options = DEFAULT_OPTIONS.merge(options)
  @options = options

  @url_template = build_url_template

  @pool = build_pool(@options)
  # mutex to prevent requests and sniffing to access the
  # connection pool at the same time
  @bulk_path = @options[:bulk_path]

  @target_bulk_bytes = @options[:target_bulk_bytes]
end
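
A minimal construction sketch, with hypothetical option values. The keys mirror what is read here and in #build_pool; `logger` and `metric` stand in for the objects Logstash normally injects:

# Hypothetical values, for illustration only.
client = LogStash::Outputs::OpenSearch::HttpClient.new(
  :logger => logger,                      # any Logger-like object
  :metric => metric,                      # a Logstash metric namespace (must respond to #namespace)
  :hosts => [::LogStash::Util::SafeURI.new("http://127.0.0.1:9200")],
  :bulk_path => "/_bulk",
  :target_bulk_bytes => 20 * 1024 * 1024, # split bulk payloads above roughly 20 MiB
  :client_settings => {
    :ssl => { :enabled => false },        # see #ssl_options and #scheme
    :proxy => "http://proxy.local:3128",
    :path => "/opensearch",
    :headers => { "X-Custom" => "value" }
  }
)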

Instance Attribute Details

#action_count ⇒ Object (readonly)

Returns the value of attribute action_count.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 19

def action_count
  @action_count
end

#client ⇒ Object (readonly)

Returns the value of attribute client.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 19

def client
  @client
end

#logger ⇒ Object (readonly)

Returns the value of attribute logger.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 19

def logger
  @logger
end

#options ⇒ Object (readonly)

Returns the value of attribute options.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 19

def options
  @options
end

#pool ⇒ Object (readonly)

Returns the value of attribute pool.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 19

def pool
  @pool
end

#recv_count ⇒ Object (readonly)

Returns the value of attribute recv_count.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 19

def recv_count
  @recv_count
end

#target_bulk_bytes ⇒ Object (readonly)

Returns the value of attribute target_bulk_bytes.



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 19

def target_bulk_bytes
  @target_bulk_bytes
end

Instance Method Details

#build_adapter(options) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 304

def build_adapter(options)
  timeout = options[:timeout] || 0
  
  adapter_options = {
    :socket_timeout => timeout,
    :request_timeout => timeout,
  }

  adapter_options[:proxy] = client_settings[:proxy] if client_settings[:proxy]

  adapter_options[:check_connection_timeout] = client_settings[:check_connection_timeout] if client_settings[:check_connection_timeout]

  # Having this explicitly set to nil is an error
  if client_settings[:pool_max]
    adapter_options[:pool_max] = client_settings[:pool_max]
  end

  # Having this explicitly set to nil is an error
  if client_settings[:pool_max_per_route]
    adapter_options[:pool_max_per_route] = client_settings[:pool_max_per_route]
  end

  adapter_options[:ssl] = ssl_options if self.scheme == 'https'

  adapter_options[:headers] = client_settings[:headers] if client_settings[:headers]

  adapter_options[:auth_type] = options[:auth_type]

  adapter_class = ::LogStash::Outputs::OpenSearch::HttpClient::ManticoreAdapter
  adapter = adapter_class.new(@logger, adapter_options)
end

#build_pool(options) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 336

def build_pool(options)
  adapter = build_adapter(options)

  pool_options = {
    :sniffing => sniffing,
    :sniffer_delay => options[:sniffer_delay],
    :sniffing_path => options[:sniffing_path],
    :healthcheck_path => options[:healthcheck_path],
    :resurrect_delay => options[:resurrect_delay],
    :url_normalizer => self.method(:host_to_url),
    :metric => options[:metric],
    :default_server_major_version => options[:default_server_major_version]
  }
  pool_options[:scheme] = self.scheme if self.scheme

  pool_class = ::LogStash::Outputs::OpenSearch::HttpClient::Pool
  full_urls = @options[:hosts].map {|h| host_to_url(h) }
  pool = pool_class.new(@logger, adapter, full_urls, pool_options)
  pool.start
  pool
end

#build_url_template ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 66

def build_url_template
  {
    :scheme => self.scheme,
    :user => self.user,
    :password => self.password,
    :host => "URLTEMPLATE",
    :port => self.port,
    :path => self.path
  }
end

#bulk(actions) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 93

def bulk(actions)
  @action_count ||= 0
  @action_count += actions.size
  return if actions.empty?

  bulk_actions = actions.collect do |action, args, source|
    args, source = update_action_builder(args, source) if action == 'update'

    if source && action != 'delete'
      next [ { action => args }, source ]
    else
      next { action => args }
    end
  end

  body_stream = StringIO.new
  if http_compression
    stream_writer = gzip_writer(body_stream)
  else
    stream_writer = body_stream
  end
  bulk_responses = []
  batch_actions = []
  bulk_actions.each_with_index do |action, index|
    as_json = action.is_a?(Array) ?
                action.map {|line| LogStash::Json.dump(line)}.join("\n") :
                LogStash::Json.dump(action)
    as_json << "\n"
    if (stream_writer.pos + as_json.bytesize) > @target_bulk_bytes && stream_writer.pos > 0
      stream_writer.flush # ensure writer has sync'd buffers before reporting sizes
      logger.debug("Sending partial bulk request for batch with one or more actions remaining.",
                   :action_count => batch_actions.size,
                   :payload_size => stream_writer.pos,
                   :content_length => body_stream.size,
                   :batch_offset => (index + 1 - batch_actions.size))

      # Have to close gzip writer before reading from body_stream; otherwise stream doesn't end properly
      # and will cause server side error
      if http_compression
        stream_writer.close
      end

      bulk_responses << bulk_send(body_stream, batch_actions)

      if http_compression
        # Get a new StringIO object and gzip writer
        body_stream = StringIO.new
        stream_writer = gzip_writer(body_stream)
      else
        # Clear existing StringIO object and reuse existing stream writer
        body_stream.truncate(0) && body_stream.seek(0)
      end

      batch_actions.clear
    end
    stream_writer.write(as_json)
    batch_actions << action
  end
  stream_writer.close if http_compression
  logger.debug("Sending final bulk request for batch.",
               :action_count => batch_actions.size,
               :payload_size => stream_writer.pos,
               :content_length => body_stream.size,
               :batch_offset => (actions.size - batch_actions.size))
  bulk_responses << bulk_send(body_stream, batch_actions) if body_stream.size > 0
  body_stream.close if !http_compression
  join_bulk_responses(bulk_responses)
end
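
For reference, the method expects an array of [action, params, source] tuples as assembled by the output plugin. A hedged sketch of the shape (field values are illustrative; 'delete' actions carry no source document and 'update' params are rewritten by #update_action_builder):

actions = [
  ['index',  { :_id => nil,   :_index => 'logs-2024.01.01' }, { 'message' => 'hello' }],
  ['delete', { :_id => 'old', :_index => 'logs-2024.01.01' }, nil]
]
result = client.bulk(actions)
result['errors'] # => true when any sub-request failed (see #join_bulk_responses)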

#bulk_send(body_stream, batch_actions) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 177

def bulk_send(body_stream, batch_actions)
  params = http_compression ? {:headers => {"Content-Encoding" => "gzip"}} : {}
  response = @pool.post(@bulk_path, params, body_stream.string)

  @bulk_response_metrics.increment(response.code.to_s)

  case response.code
  when 200 # OK
    LogStash::Json.load(response.body)
  when 413 # Payload Too Large
    logger.warn("Bulk request rejected: `413 Payload Too Large`", :action_count => batch_actions.size, :content_length => body_stream.size)
    emulate_batch_error_response(batch_actions, response.code, 'payload_too_large')
  else
    url = ::LogStash::Util::SafeURI.new(response.final_url)
    raise ::LogStash::Outputs::OpenSearch::HttpClient::Pool::BadResponseCodeError.new(
      response.code, url, body_stream.to_s, response.body
    )
  end
end

#calculate_property(uris, property, default, sniff_check) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 224

def calculate_property(uris, property, default, sniff_check)
  values = uris.map(&property).uniq

  if sniff_check && values.size > 1
    raise LogStash::ConfigurationError, "Cannot have multiple values for #{property} in hosts when sniffing is enabled!"
  end

  uri_value = values.first

  default = nil if default.is_a?(String) && default.empty? # Blanks are as good as nil
  uri_value = nil if uri_value.is_a?(String) && uri_value.empty?

  if default && uri_value && (default != uri_value)
    raise LogStash::ConfigurationError, "Explicit value for '#{property}' was declared, but it is different in one of the URLs given! Please make sure your URLs are inline with explicit values. The URLs have the property set to '#{uri_value}', but it was also set to '#{default}' explicitly"
  end

  uri_value || default
end
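
An illustrative sketch of the resolution logic, assuming two configured hosts that both use port 9200 (host values are hypothetical):

client.calculate_property(client.uris, :port, nil, false)  # => 9200 (single distinct value across the URLs)
client.calculate_property(client.uris, :port, 9300, false) # raises LogStash::ConfigurationError: explicit 9300 conflicts with 9200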

#client_settings ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 292

def client_settings
  @options[:client_settings] || {}
end

#close ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 220

def close
  @pool.close
end

#emulate_batch_error_response(actions, http_code, reason) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 197

def emulate_batch_error_response(actions, http_code, reason)
  {
      "errors" => true,
      "items" => actions.map do |action|
        action = action.first if action.is_a?(Array)
        request_action, request_parameters = action.first
        {
            request_action => {"status" => http_code, "error" => { "type" => reason }}
        }
      end
  }
end

#exists?(path, use_get = false) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/logstash/outputs/opensearch/http_client.rb', line 392

def exists?(path, use_get=false)
  response = use_get ? @pool.get(path) : @pool.head(path)
  response.code >= 200 && response.code <= 299
end

#get(path) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 210

def get(path)
  response = @pool.get(path, nil)
  LogStash::Json.load(response.body)
end

#gzip_writer(io) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 162

def gzip_writer(io)
  fail(ArgumentError, "Cannot create gzip writer on IO with unread bytes") unless io.eof?
  fail(ArgumentError, "Cannot create gzip writer on non-empty IO") unless io.pos == 0

  io.set_encoding "BINARY"
  Zlib::GzipWriter.new(io, Zlib::DEFAULT_COMPRESSION, Zlib::DEFAULT_STRATEGY)
end

#host_to_url(h) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 358

def host_to_url(h)
  # Never override the calculated scheme
  raw_scheme = @url_template[:scheme] || 'http'

  raw_user = h.user || @url_template[:user]
  raw_password = h.password || @url_template[:password]
  postfixed_userinfo = raw_user && raw_password ? "#{raw_user}:#{raw_password}@" : nil

  raw_host = h.host # Always replace this!
  raw_port =  h.port || @url_template[:port]

  raw_path = !h.path.nil? && !h.path.empty? &&  h.path != "/" ? h.path : @url_template[:path]
  prefixed_raw_path = raw_path && !raw_path.empty? ? raw_path : "/"

  parameters = client_settings[:parameters]
  raw_query = if parameters && !parameters.empty?
                combined = h.query ?
                  Hash[URI::decode_www_form(h.query)].merge(parameters) :
                  parameters
                query_str = combined.flat_map {|k,v|
                  values = Array(v)
                  values.map {|av| "#{k}=#{av}"}
                }.join("&")
                query_str
              else
                h.query
              end
  prefixed_raw_query = raw_query && !raw_query.empty? ? "?#{raw_query}" : nil
  
  raw_url = "#{raw_scheme}://#{postfixed_userinfo}#{raw_host}:#{raw_port}#{prefixed_raw_path}#{prefixed_raw_query}"

  ::LogStash::Util::SafeURI.new(raw_url)
end
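
A hedged example of the normalization, assuming the url template carries `https`, credentials and a `/prefix` path from the plugin configuration (exact parsing of the host entry is delegated to LogStash::Util::SafeURI):

client.host_to_url(::LogStash::Util::SafeURI.new("http://node1.example.com:9201"))
# => https://admin:secret@node1.example.com:9201/prefix
#    the template scheme always wins; host, port and any query string come from the host entry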

#http_compression ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 300

def http_compression
  client_settings.fetch(:http_compression, false)
end

#join_bulk_responses(bulk_responses) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 170

def join_bulk_responses(bulk_responses)
  {
    "errors" => bulk_responses.any? {|r| r["errors"] == true},
    "items" => bulk_responses.reduce([]) {|m,r| m.concat(r.fetch("items", []))}
  }
end

#last_version ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 85

def last_version
  @pool.last_version
end

#legacy_template? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/logstash/outputs/opensearch/http_client.rb', line 407

def legacy_template?()
  # TODO: Also check Version and return true for < 7.8 even if :legacy_template=false
  # Need to figure a way to distinguish between OpenSearch, OpenDistro and other 
  # variants, since they have version numbers in different ranges.
  client_settings.fetch(:legacy_template, true)
end

#maximum_seen_major_version ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 89

def maximum_seen_major_version
  @pool.maximum_seen_major_version
end

#password ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 251

def password
  calculate_property(uris, :password, @options[:password], sniffing)
end

#path ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 255

def path
  calculated = calculate_property(uris, :path, client_settings[:path], sniffing)
  calculated = "/#{calculated}" if calculated && !calculated.start_with?("/")
  calculated
end

#port ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 281

def port
  # We don't set the 'default' here because the default is what the user
  # indicated, so we use an || outside of calculate_property. This lets people
  # Enter things like foo:123, bar and wind up with foo:123, bar:9200
  calculate_property(uris, :port, nil, sniffing) || 9200
end

#post(path, params = {}, body_string) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 215

def post(path, params = {}, body_string)
  response = @pool.post(path, params, body_string)
  LogStash::Json.load(response.body)
end

#rollover_alias_exists?(name) ⇒ Boolean

check whether rollover alias already exists

Returns:

  • (Boolean)


# File 'lib/logstash/outputs/opensearch/http_client.rb', line 420

def rollover_alias_exists?(name)
  exists?(name)
end

#rollover_alias_put(alias_name, alias_definition) ⇒ Object

Create a new rollover alias



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 425

def rollover_alias_put(alias_name, alias_definition)
  begin
    @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
    logger.info("Created rollover alias", name: alias_name)
    # If the rollover alias already exists, ignore the error that comes back from OpenSearch
  rescue ::LogStash::Outputs::OpenSearch::HttpClient::Pool::BadResponseCodeError => e
    if e.response_code == 400
        logger.info("Rollover alias already exists, skipping", name: alias_name)
        return
    end
    raise e
  end
end

#scheme ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 261

def scheme
  explicit_scheme = if ssl_options && ssl_options.has_key?(:enabled)
                      ssl_options[:enabled] ? 'https' : 'http'
                    else
                      nil
                    end
  
  calculated_scheme = calculate_property(uris, :scheme, explicit_scheme, sniffing)

  if calculated_scheme && calculated_scheme !~ /https?/
    raise LogStash::ConfigurationError, "Bad scheme '#{calculated_scheme}' found should be one of http/https"
  end

  if calculated_scheme && explicit_scheme && calculated_scheme != explicit_scheme
    raise LogStash::ConfigurationError, "SSL option was explicitly set to #{ssl_options[:enabled]} but a URL was also declared with a scheme of '#{explicit_scheme}'. Please reconcile this"
  end

  calculated_scheme # May be nil if explicit_scheme is nil!
end

#sniffing ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 243

def sniffing
  @options[:sniffing]
end

#ssl_options ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 296

def ssl_options
  client_settings.fetch(:ssl, {})
end

#template_endpoint ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 414

def template_endpoint
  # https://opensearch.org/docs/opensearch/index-templates/
  legacy_template?() ? '_template' : '_index_template'
end

#template_exists?(name) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/logstash/outputs/opensearch/http_client.rb', line 397

def template_exists?(name)
  exists?("/#{template_endpoint}/#{name}")
end

#template_install(name, template, force = false) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 77

def template_install(name, template, force=false)
  if template_exists?(name) && !force
    @logger.debug("Found existing OpenSearch template, skipping template management", name: name)
    return
  end
  template_put(name, template)
end
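
A short usage sketch; `template_hash` is a hypothetical Hash matching whatever the OpenSearch template API accepts:

template_hash = { 'index_patterns' => ['logstash-*'] }           # hypothetical, minimal body
client.template_install('logstash-default', template_hash)       # no-op if the template already exists
client.template_install('logstash-default', template_hash, true) # force: always PUT via #template_put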

#template_put(name, template) ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 401

def template_put(name, template)
  path = "/#{template_endpoint}/#{name}"
  logger.info("Installing OpenSearch template", name: name)
  @pool.put(path, nil, LogStash::Json.dump(template))
end

#update_action_builder(args, source) ⇒ Object

Build a bulk item for an opensearch update action



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 440

def update_action_builder(args, source)
  args = args.clone()
  if args[:_script]
    # Use the event as a hash from your script with variable name defined
    # by script_var_name (default: "event")
    # Ex: event["@timestamp"]
    source_orig = source
    source = { 'script' => {'params' => { @options[:script_var_name] => source_orig }} }
    if @options[:scripted_upsert]
      source['scripted_upsert'] = true
      source['upsert'] = {}
    elsif @options[:doc_as_upsert]
      source['upsert'] = source_orig
    else
      source['upsert'] = args.delete(:_upsert) if args[:_upsert]
    end
    case @options[:script_type]
    when 'indexed'
      source['script']['id'] = args.delete(:_script)
    when 'file'
      source['script']['file'] = args.delete(:_script)
    when 'inline'
      source['script']['inline'] = args.delete(:_script)
    end
    source['script']['lang'] = @options[:script_lang] if @options[:script_lang] != ''
  else
    source = { 'doc' => source }
    if @options[:doc_as_upsert]
      source['doc_as_upsert'] = true
    else
      source['upsert'] = args.delete(:_upsert) if args[:_upsert]
    end
  end
  [args, source]
end
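
For illustration, a plain (non-scripted) update with `doc_as_upsert` enabled is rewritten like this (values are hypothetical):

# Assumes @options[:doc_as_upsert] == true and no :_script in args.
args, source = client.update_action_builder({ :_id => '1', :_index => 'logs' }, { 'status' => 'ok' })
# args   => { :_id => '1', :_index => 'logs' }
# source => { 'doc' => { 'status' => 'ok' }, 'doc_as_upsert' => true }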

#uris ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 288

def uris
  @options[:hosts]
end

#user ⇒ Object



# File 'lib/logstash/outputs/opensearch/http_client.rb', line 247

def user
  calculate_property(uris, :user, @options[:user], sniffing)
end