Class: LogStash::Inputs::LogstashInputAzureblob

Inherits:
Base
  • Object
show all
Defined in:
lib/logstash/inputs/azureblob.rb

Overview

Logstash input plugin for Azure Blobs

This logstash plugin gathers data from Microsoft Azure Blobs

Constant Summary collapse

MAX =

Constant of max integer

2 ** ([42].pack('i').size * 16 - 2 ) -1
UPDATE_REGISTRY_COUNT =

Update the registry offset each time after this number of entries have been processed

100

Instance Method Summary collapse

Instance Method Details

#acquire_lease(blob_name, retry_times = 60, interval_sec = 1) ⇒ Object

Acquire a lease on a blob item with retries.

By default, it will retry 60 times with 1 second interval.



302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
# File 'lib/logstash/inputs/azureblob.rb', line 302

# Acquire a lease on a blob item, retrying while another reader holds it.
#
# blob_name    - name of the blob to lock.
# retry_times  - max retries when the lease is already held (default 60).
# interval_sec - seconds to sleep between retries (default 1).
#
# Returns the lease id. Re-raises the 'LeaseAlreadyPresent' error once the
# retry budget is exhausted.
def acquire_lease(blob_name, retry_times = 60, interval_sec = 1)
  lease = nil
  retried = 0
  while lease.nil?
    begin
      lease = @azure_blob.acquire_blob_lease(@container, blob_name, { :timeout => 60, :duration => @registry_lease_duration })
    rescue StandardError => e
      if e.type && e.type == 'LeaseAlreadyPresent'
        raise if retried > retry_times
        retried += 1
        sleep interval_sec
      else
        # Anything other than 'LeaseAlreadyPresent' happened: break the lease.
        # This is a work-around for the behavior that when a timeout exception
        # is hit, somehow an infinite lease is put on the lock file.
        # Bug fix: was `blob` (an undefined local, raising NameError) instead
        # of `blob_name`.
        @azure_blob.break_blob_lease(@container, blob_name, { :break_period => 30 })
      end
    end
  end
  return lease
end

#cleanup_registryObject

Clean up the registry.



414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
# File 'lib/logstash/inputs/azureblob.rb', line 414

# Release this reader's ownership marks in the registry so other plugin
# instances can pick up the blobs this reader was processing. Takes the
# registry lock for the duration of the update; errors are logged, and the
# lease is always released.
def cleanup_registry
  lease = nil
  lease = acquire_lease(@registry_locker)
  registry_hash = load_registry
  registry_hash.each_value do |registry_item|
    registry_item.reader = nil if registry_item.reader == @reader
  end
  save_registry(registry_hash)
  @azure_blob.release_blob_lease(@container, @registry_locker, lease)
  lease = nil
rescue StandardError => e
  @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
ensure
  @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
  lease = nil
end

#create_registry(blob_items) ⇒ Object

Create a registry file to coordinate between multiple azure blob inputs.



434
435
436
437
438
439
440
441
442
443
444
445
# File 'lib/logstash/inputs/azureblob.rb', line 434

# Create a fresh registry hash from the blobs currently in the container and
# persist it. When @registry_create_policy is 'resume', each item's offset
# starts at the blob's current content length so existing content is skipped;
# otherwise offsets start at zero.
def create_registry (blob_items)
  registry_hash = Hash.new
  blob_items.each do |blob_item|
    initial_offset = @registry_create_policy == 'resume' ? blob_item.properties[:content_length] : 0
    registry_hash[blob_item.name] = LogStash::Inputs::RegistryItem.new(blob_item.name, blob_item.properties[:etag], nil, initial_offset, 0)
  end
  save_registry(registry_hash)
  return registry_hash
end

#deserialize_registry_hash(json_string) ⇒ Object

Deserialize registry hash from json string.



250
251
252
253
254
255
256
257
# File 'lib/logstash/inputs/azureblob.rb', line 250

# Rebuild the in-memory registry from its JSON representation.
# Returns a hash mapping each file_path to its RegistryItem.
def deserialize_registry_hash (json_string)
  JSON.parse(json_string).values.each_with_object(Hash.new) do |kvp, result|
    result[kvp['file_path']] = LogStash::Inputs::RegistryItem.new(kvp['file_path'], kvp['etag'], kvp['reader'], kvp['offset'], kvp['gen'])
  end
end

#enqueue_content(queue, content, header, tail) ⇒ Object

Decode the given content with the configured codec — optionally wrapped with the blob's header and tail — and enqueue the decorated events.



217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
# File 'lib/logstash/inputs/azureblob.rb', line 217

# Decode the given content with the configured codec and enqueue the
# decorated events, prepending the blob header and appending the tail when
# they are present and non-empty.
def enqueue_content(queue, content, header, tail)
  has_header = !(header.nil? || header.length == 0)
  has_tail = !(tail.nil? || tail.length == 0)

  if has_header || has_tail
    full_content = ""
    full_content << header if has_header
    full_content << content
    full_content << tail if has_tail
  else
    # Neither header nor tail: avoid an unnecessary copy of content.
    full_content = content
  end

  @codec.decode(full_content) do |event|
    decorate(event)
    queue << event
  end
end

#list_all_blobsObject

List all the blobs in the given container.



260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
# File 'lib/logstash/inputs/azureblob.rb', line 260

# List all the blobs in the configured container, paging through results with
# a continuation token.
#
# Returns a Set of blob entries.
def list_all_blobs
  blobs = Set.new []
  # Bug fix: `NIL` is a long-deprecated alias constant that was removed in
  # Ruby 3.0; use the `nil` literal.
  continuation_token = nil
  @blob_list_page_size = 100 if @blob_list_page_size <= 0
  loop do
    # Limit the number of entries returned per page to avoid an
    # out-of-memory exception on very large containers.
    entries = @azure_blob.list_blobs(@container, { :timeout => 60, :marker => continuation_token, :max_results => @blob_list_page_size })
    entries.each do |entry|
      blobs << entry
    end # each
    continuation_token = entries.continuation_token
    break if continuation_token.empty?
  end # loop
  return blobs
end

#load_registryObject

Load the content of the registry into the registry hash and return it.



448
449
450
451
452
453
# File 'lib/logstash/inputs/azureblob.rb', line 448

# Download the registry blob and return its contents deserialized into a
# registry hash.
def load_registry
  _registry_blob, registry_blob_body = @azure_blob.get_blob(@container, @registry_path)
  deserialize_registry_hash(registry_blob_body)
end

#on_entry_processed(start_index, content_length, blob_name, new_etag, gen) ⇒ Object



234
235
236
237
238
239
# File 'lib/logstash/inputs/azureblob.rb', line 234

# Bump the processed-entry counter and persist the registry offset once every
# UPDATE_REGISTRY_COUNT entries, to limit the update frequency.
def on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
  @processed_entries += 1
  request_registry_update(start_index, content_length, blob_name, new_etag, gen) if (@processed_entries % UPDATE_REGISTRY_COUNT).zero?
end

#process(queue) ⇒ Object

Start processing the next item.



152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
# File 'lib/logstash/inputs/azureblob.rb', line 152

# Process the next available blob: pick one via the registry, read its
# content (JSON-aware when the JSON codec is configured), enqueue the decoded
# events, and keep the registry offset up to date. All errors are logged and
# swallowed so the run loop keeps going.
def process(queue)
  begin
    @processed_entries = 0
    # register_for_read returns (nil, nil, nil) when nothing is available.
    blob, start_index, gen = register_for_read

    if(!blob.nil?)
      begin
        blob_name = blob.name
        @logger.debug("Processing blob #{blob.name}")
        blob_size = blob.properties[:content_length]
        # Work-around: After returned by get_blob, the etag will contains quotes.
        new_etag = blob.properties[:etag]
        # ~ Work-around

        # NOTE(review): `header` and `tail` are never assigned before these
        # lines; the `if header.nil?` / `if tail.nil?` modifiers rely on Ruby
        # treating locals that appear in an assignment as defined (nil), so
        # the guards are always true on the first pass.
        blob, header = @azure_blob.get_blob(@container, blob_name, {:end_range => (@file_head_bytes-1) }) if header.nil? unless @file_head_bytes.nil? or @file_head_bytes <= 0

        blob, tail = @azure_blob.get_blob(@container, blob_name, {:start_range => blob_size - @file_tail_bytes}) if tail.nil? unless @file_tail_bytes.nil? or @file_tail_bytes <= 0

        if start_index == 0
          # Skip the header since it is already read.
          start_index = @file_head_bytes
        end

        @logger.debug("start index: #{start_index} blob size: #{blob_size}")

        content_length = 0
        # Reads [start_index, blob_size - 1 - @file_tail_bytes] in chunks.
        # NOTE(review): `file_chunk_size_bytes` is a bare method call —
        # presumably a plugin config accessor; confirm it is defined.
        blob_reader = BlobReader.new(@logger, @azure_blob, @container, blob_name, file_chunk_size_bytes, start_index, blob_size - 1 - @file_tail_bytes)

        is_json_codec = (defined?(LogStash::Codecs::JSON) == 'constant') && (@codec.is_a? LogStash::Codecs::JSON)
        if is_json_codec
          # JSON path: parse entry by entry; malformed bytes are skipped but
          # still advance the offset so they are not re-read.
          parser = JsonParser.new(@logger, blob_reader)

          parser.parse(->(json_content) {
            content_length = content_length + json_content.length

            enqueue_content(queue, json_content, header, tail)

            on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
          }, ->(malformed_json) {
            @logger.debug("Skipping #{malformed_json.length} malformed bytes")
            content_length = content_length + malformed_json.length

            on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
          })
        else
          # Non-JSON path: stream raw chunks until the reader is exhausted.
          begin
            content, are_more_bytes_available = blob_reader.read

            content_length = content_length + content.length
            enqueue_content(queue, content, header, tail)

            on_entry_processed(start_index, content_length, blob_name, new_etag, gen)
          end until !are_more_bytes_available || content.nil?

        end #if
      ensure
        # Making sure the reader is removed from the registry even when there's exception.
        request_registry_update(start_index, content_length, blob_name, new_etag, gen)
      end # begin
    end # if
  rescue => e
    @logger.error("Oh My, An error occurred. Error:#{e}: Trace: #{e.backtrace}", :exception => e)
  end # begin
end

#raise_gen(registry_hash, file_path) ⇒ Object

Raise generation for blob in registry



277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
# File 'lib/logstash/inputs/azureblob.rb', line 277

# Raise the generation counter for the blob at file_path — recently picked
# blobs get higher generations so they are deprioritized on the next pick —
# then shift every generation down so the minimum across the registry is zero.
def raise_gen(registry_hash, file_path)
  target_item = registry_hash[file_path]
  begin
    target_item.gen += 1
    # Protect gen from overflowing past MAX by halving it at the ceiling.
    target_item.gen = target_item.gen / 2 if target_item.gen == MAX
  rescue StandardError => e
    @logger.error("Fail to get the next generation for target item #{target_item}.", :exception => e)
    target_item.gen = 0
  end

  # Normalize: decrement all generations until the smallest reaches zero.
  while registry_hash.values.map(&:gen).min > 0
    registry_hash.each_value { |value| value.gen -= 1 }
  end
end

#registerObject



122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
# File 'lib/logstash/inputs/azureblob.rb', line 122

# Plugin registration hook: set up this instance's reader id, the registry
# locker path, and the Azure Storage blob service client.
def register
  user_agent = "logstash-input-azureblob/#{Gem.latest_spec_for("logstash-input-azureblob").version}"

  # Unique reader id for this specific plugin instance.
  @reader = SecureRandom.uuid
  @registry_locker = "#{@registry_path}.lock"

  # Set up a dedicated Azure::Storage::Client and take its blob service.
  client = Azure::Storage::Client.create(
    :storage_account_name => @storage_account_name,
    :storage_access_key => @storage_access_key,
    :storage_blob_host => "https://#{@storage_account_name}.blob.#{@endpoint}",
    :user_agent_prefix => user_agent)
  @azure_blob = client.blob_client
  # Retry transient storage failures with exponential backoff.
  @azure_blob.with_filter(Azure::Storage::Core::Filter::ExponentialRetryPolicyFilter.new)
end

#register_for_readObject

Return the next blob for reading as well as the start index.



326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
# File 'lib/logstash/inputs/azureblob.rb', line 326

# Pick the next blob for reading under the registry lock.
#
# Returns [blob, start_index, gen] — the chosen blob entry, the byte offset
# to resume from, and its generation — or (nil, nil, nil) when nothing is
# available or an error occurred.
def register_for_read
  begin
    all_blobs = list_all_blobs
    # NOTE(review): names are downcased before comparing against
    # @registry_path / @registry_locker, but those settings are not downcased
    # here — confirm they are always lowercase, or this lookup can miss.
    registry = all_blobs.find { |item| item.name.downcase == @registry_path  }
    registry_locker = all_blobs.find { |item| item.name.downcase == @registry_locker }

    # Everything that is not the registry or its locker is a candidate.
    candidate_blobs = all_blobs.select { |item| (item.name.downcase != @registry_path) && ( item.name.downcase != @registry_locker ) }
    
    start_index = 0
    gen = 0
    lease = nil

    # Put the lease on the locker file rather than the registry file itself, so
    # the registry can still be updated; work-around for Azure Storage Ruby SDK issue #16.
    # Workaround: https://github.com/Azure/azure-storage-ruby/issues/16
    registry_locker = @azure_blob.create_block_blob(@container, @registry_locker, @reader) if registry_locker.nil?
    lease = acquire_lease(@registry_locker)
    # ~ Workaround

    if(registry.nil?)
      registry_hash = create_registry(candidate_blobs)
    else
      registry_hash = load_registry
    end #if
      
    picked_blobs = Set.new []
    # Pick up the next candidate
    picked_blob = nil
    candidate_blobs.each { |candidate_blob|
      @logger.debug("candidate_blob: #{candidate_blob.name} content length: #{candidate_blob.properties[:content_length]}")
      registry_item = registry_hash[candidate_blob.name]

      # Appending items that doesn't exist in the hash table
      if registry_item.nil?
        registry_item = LogStash::Inputs::RegistryItem.new(candidate_blob.name, candidate_blob.properties[:etag], nil, 0, 0)
        registry_hash[candidate_blob.name] = registry_item
      end # if
      @logger.debug("registry_item offset: #{registry_item.offset}")
      # Eligible when it has unread bytes and is unclaimed or claimed by us.
      if ((registry_item.offset < candidate_blob.properties[:content_length]) && (registry_item.reader.nil? || registry_item.reader == @reader))
        @logger.debug("candidate_blob picked: #{candidate_blob.name} content length: #{candidate_blob.properties[:content_length]}")
        picked_blobs << candidate_blob
      end
    }

    # Prefer the blob with the lowest generation (least recently picked).
    picked_blob = picked_blobs.min_by { |b| registry_hash[b.name].gen }
    if !picked_blob.nil?
      registry_item = registry_hash[picked_blob.name]
      registry_item.reader = @reader
      registry_hash[picked_blob.name] = registry_item
      start_index = registry_item.offset
      raise_gen(registry_hash, picked_blob.name)
      gen = registry_item.gen
    end #if

    # Save the change for the registry.
    save_registry(registry_hash)
    
    @azure_blob.release_blob_lease(@container, @registry_locker, lease)
    lease = nil;

    return picked_blob, start_index, gen
  rescue StandardError => e
    @logger.error("Oh My, An error occurred. #{e}: #{e.backtrace}", :exception => e)
    return nil, nil, nil
  ensure
    # Always release the lock, even on the error path.
    @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
    lease = nil
  end # rescue
end

#request_registry_update(start_index, content_length, blob_name, new_etag, gen) ⇒ Object



241
242
243
244
245
246
247
# File 'lib/logstash/inputs/azureblob.rb', line 241

# Persist the read progress for a blob: build a RegistryItem whose offset is
# start_index plus the bytes consumed so far (reader cleared) and hand it to
# update_registry.
def request_registry_update(start_index, content_length, blob_name, new_etag, gen)
  new_offset = content_length.nil? ? start_index : start_index + content_length
  @logger.debug("New registry offset: #{new_offset}")
  update_registry(LogStash::Inputs::RegistryItem.new(blob_name, new_etag, nil, new_offset, gen))
end

#run(queue) ⇒ Object

Run the main processing loop, repeatedly processing blobs until the plugin is stopped.



138
139
140
141
142
143
144
145
# File 'lib/logstash/inputs/azureblob.rb', line 138

# Main plugin loop: keep processing blobs until Logstash signals stop,
# sleeping @interval between iterations (the sleep itself is stoppable).
def run(queue)
  until stop?
    process(queue)
    @logger.debug("Hitting interval of #{@interval}ms . . .")
    Stud.stoppable_sleep(@interval) { stop? }
  end
end

#save_registry(registry_hash) ⇒ Object

Serialize the registry hash and save it.



456
457
458
459
460
461
462
# File 'lib/logstash/inputs/azureblob.rb', line 456

# Serialize the registry hash to JSON and upload it to the registry blob,
# overwriting the previous version.
def save_registry(registry_hash)
  @azure_blob.create_block_blob(@container, @registry_path, JSON.generate(registry_hash))
end

#stopObject

Shutdown hook: clean up this reader's entries in the registry.



147
148
149
# File 'lib/logstash/inputs/azureblob.rb', line 147

# Plugin shutdown hook: release this reader's claims in the registry so
# other instances can take over its blobs.
def stop
  cleanup_registry
end

#update_registry(registry_item) ⇒ Object

Update the registry



396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
# File 'lib/logstash/inputs/azureblob.rb', line 396

# Persist a single registry item: take the registry lock, merge the item into
# the freshly loaded registry and save it back. Errors are logged, and the
# lease is always released.
def update_registry (registry_item)
  lease = nil
  lease = acquire_lease(@registry_locker)
  registry_hash = load_registry
  registry_hash[registry_item.file_path] = registry_item
  save_registry(registry_hash)
  @azure_blob.release_blob_lease(@container, @registry_locker, lease)
  lease = nil
rescue StandardError => e
  @logger.error("Oh My, An error occurred. #{e}:\n#{e.backtrace}", :exception => e)
ensure
  @azure_blob.release_blob_lease(@container, @registry_locker, lease) unless lease.nil?
  lease = nil
end