Class: Fluent::Plugin::TailInput

Inherits:
Input < Object
Includes:
GroupWatch
Defined in:
lib/fluent/plugin/in_tail.rb,
lib/fluent/plugin/in_tail/group_watch.rb,
lib/fluent/plugin/in_tail/position_file.rb

Defined Under Namespace

Modules: GroupWatch, GroupWatchParams
Classes: Entry, FilePositionEntry, GroupWatcher, MemoryPositionEntry, MetricsInfo, PositionFile, StatWatcher, TailWatcher, TargetInfo, TimerTrigger, WatcherSetupError

Constant Summary

RESERVED_CHARS =
['/', '*', '%'].freeze

Constants included from Configurable

Configurable::CONFIG_TYPE_REGISTRY

Instance Attribute Summary

Attributes included from GroupWatch

#default_group_key, #group_watchers

Attributes included from Fluent::PluginLoggerMixin

#log

Attributes inherited from Base

#under_plugin_development

Instance Method Summary

Methods included from GroupWatch

#add_path_to_group_watcher, #construct_group_key, #construct_groupwatchers, #find_group, #find_group_from_metadata, included, #remove_path_from_group_watcher

Methods inherited from Input

#emit_records, #emit_size, #metric_callback, #multi_workers_ready?

Methods included from Fluent::PluginHelper::Mixin

included

Methods included from Fluent::PluginLoggerMixin

included, #terminate

Methods included from Fluent::PluginId

#plugin_id, #plugin_id_configured?, #plugin_id_for_test?, #plugin_root_dir

Methods inherited from Base

#acquire_worker_lock, #after_shutdown, #after_shutdown?, #after_start, #after_started?, #before_shutdown, #before_shutdown?, #called_in_test?, #closed?, #configured?, #context_router, #context_router=, #fluentd_worker_id, #get_lock_path, #has_router?, #inspect, #multi_workers_ready?, #plugin_root_dir, #reloadable_plugin?, #shutdown?, #started?, #stopped?, #string_safe_encoding, #terminate, #terminated?

Methods included from SystemConfig::Mixin

#system_config, #system_config_override

Methods included from Configurable

#config, #configure_proxy_generate, #configured_section_create, included, lookup_type, register_type

Constructor Details

#initialize ⇒ TailInput

Returns a new instance of TailInput.



# File 'lib/fluent/plugin/in_tail.rb', line 51

def initialize
  super
  @paths = []
  @tails = {}
  @pf_file = nil
  @pf = nil
  @ignore_list = []
  @shutdown_start_time = nil
  @metrics = nil
  @startup = true
end

Instance Attribute Details

#paths ⇒ Object (readonly)

Returns the value of attribute paths.



# File 'lib/fluent/plugin/in_tail.rb', line 121

def paths
  @paths
end

Instance Method Details

#close ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 275

def close
  super
  # close file handles after all threads stopped (in #close of thread plugin helper)
  close_watcher_handles
end

#close_watcher_handles ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 490

def close_watcher_handles
  @tails.keys.each do |path|
    tw = @tails.delete(path)
    if tw
      tw.close
    end
  end
end

#configure(conf) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 123

def configure(conf)
  @variable_store = Fluent::VariableStore.fetch_or_build(:in_tail)
  compat_parameters_convert(conf, :parser)
  parser_config = conf.elements('parse').first
  unless parser_config
    raise Fluent::ConfigError, "<parse> section is required."
  end

  (1..Fluent::Plugin::MultilineParser::FORMAT_MAX_NUM).each do |n|
    parser_config["format#{n}"] = conf["format#{n}"] if conf["format#{n}"]
  end

  parser_config['unmatched_lines'] = conf['emit_unmatched_lines']

  super

  if !@enable_watch_timer && !@enable_stat_watcher
    raise Fluent::ConfigError, "either of enable_watch_timer or enable_stat_watcher must be true"
  end

  if RESERVED_CHARS.include?(@path_delimiter)
    rc = RESERVED_CHARS.join(', ')
    raise Fluent::ConfigError, "#{rc} are reserved words: #{@path_delimiter}"
  end

  @paths = @path.split(@path_delimiter).map(&:strip).uniq
  if @paths.empty?
    raise Fluent::ConfigError, "tail: 'path' parameter is required on tail input"
  end
  if @path_timezone
    Fluent::Timezone.validate!(@path_timezone)
    @path_formatters = @paths.map{|path| [path, Fluent::Timezone.formatter(@path_timezone, path)]}.to_h
    @exclude_path_formatters = @exclude_path.map{|path| [path, Fluent::Timezone.formatter(@path_timezone, path)]}.to_h
  end

  # TODO: Use plugin_root_dir and storage plugin to store positions if available
  if @pos_file
    if @variable_store.key?(@pos_file) && !called_in_test?
      plugin_id_using_this_path = @variable_store[@pos_file]
      raise Fluent::ConfigError, "Other 'in_tail' plugin already use same pos_file path: plugin_id = #{plugin_id_using_this_path}, pos_file path = #{@pos_file}"
    end
    @variable_store[@pos_file] = self.plugin_id
  else
    if @follow_inodes
      raise Fluent::ConfigError, "Can't follow inodes without pos_file configuration parameter"
    end
    $log.warn "'pos_file PATH' parameter is not set to a 'tail' source."
    $log.warn "this parameter is highly recommended to save the position to resume tailing."
  end

  configure_tag
  configure_encoding

  @multiline_mode = parser_config["@type"] =~ /multiline/
  @receive_handler = if @multiline_mode
                       method(:parse_multilines)
                     else
                       method(:parse_singleline)
                     end
  @file_perm = system_config.file_permission || Fluent::DEFAULT_FILE_PERMISSION
  @dir_perm = system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION
  # parser is already created by parser helper
  @parser = parser_create(usage: parser_config['usage'] || @parser_configs.first.usage)
  @capability = Fluent::Capability.new(:current_process)
  if @read_bytes_limit_per_second > 0
    if !@enable_watch_timer
      raise Fluent::ConfigError, "Need to enable watch timer when using log throttling feature"
    end
    min_bytes = TailWatcher::IOHandler::BYTES_TO_READ
    if @read_bytes_limit_per_second < min_bytes
      log.warn "Should specify greater equal than #{min_bytes}. Use #{min_bytes} for read_bytes_limit_per_second"
      @read_bytes_limit_per_second = min_bytes
    end
  end
  opened_file_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_opened_total", help_text: "Total number of opened files")
  closed_file_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_closed_total", help_text: "Total number of closed files")
  rotated_file_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_rotated_total", help_text: "Total number of rotated files")
  @metrics = MetricsInfo.new(opened_file_metrics, closed_file_metrics, rotated_file_metrics)
end
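
For orientation, here is a minimal example of the kind of <source> configuration this method validates: a non-empty path, a <parse> section (required), and a pos_file (strongly recommended so positions survive restarts). The concrete values are illustrative, not defaults.

<source>
  @type tail
  path /var/log/app/*.log
  pos_file /var/log/fluentd/app.log.pos
  tag app.logs
  <parse>
    @type none
  </parse>
</source>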

#configure_encoding ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 214

def configure_encoding
  unless @encoding
    if @from_encoding
      raise Fluent::ConfigError, "tail: 'from_encoding' parameter must be specified with 'encoding' parameter."
    end
  end

  @encoding = parse_encoding_param(@encoding) if @encoding
  @from_encoding = parse_encoding_param(@from_encoding) if @from_encoding
  if @encoding && (@encoding == @from_encoding)
    log.warn "'encoding' and 'from_encoding' are same encoding. No effect"
  end
end
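
Conceptually, 'from_encoding' describes the bytes as they appear on disk and 'encoding' describes what the emitted string should be converted to (or labeled as). A rough, self-contained sketch of that semantics, not the plugin's actual I/O path; the sample bytes and configured values are hypothetical:

# Illustration only.
raw = "caf\xE9".b                        # bytes as read from disk ("é" in ISO-8859-1)
from_encoding = Encoding::ISO_8859_1     # hypothetical configured from_encoding
encoding      = Encoding::UTF_8          # hypothetical configured encoding

line = raw.dup
if from_encoding && from_encoding != encoding
  line.force_encoding(from_encoding)
  line.encode!(encoding)                 # real conversion between encodings
elsif encoding
  line.force_encoding(encoding)          # relabel the bytes without converting
end
puts line                                # => "café"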

#configure_tag ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 203

def configure_tag
  if @tag.index('*')
    @tag_prefix, @tag_suffix = @tag.split('*')
    @tag_prefix ||= ''
    @tag_suffix ||= ''
  else
    @tag_prefix = nil
    @tag_suffix = nil
  end
end
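
For illustration: when the tag contains '*', the prefix and suffix around it are kept, and the per-file tag computed by the TailWatcher (essentially the tailed path with '/' turned into '.') is spliced in at emit time. A small sketch with hypothetical values:

tag = 'app.*'                              # hypothetical configured tag
tag_prefix, tag_suffix = tag.split('*')
tag_prefix ||= ''
tag_suffix ||= ''

watcher_tag = 'var.log.app.access_log'     # e.g. derived from /var/log/app/access_log
puts tag_prefix + watcher_tag + tag_suffix # => "app.var.log.app.access_log"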

#construct_watcher(target_info) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 435

def construct_watcher(target_info)
  path = target_info.path

  # The file might be rotated or removed after collecting paths, so check inode again here.
  begin
    target_info.ino = Fluent::FileWrapper.stat(path).ino
  rescue Errno::ENOENT, Errno::EACCES
    $log.warn "stat() for #{path} failed. Continuing without tailing it."
    return
  end

  pe = nil
  if @pf
    pe = @pf[target_info]
    pe.update(target_info.ino, 0) if @read_from_head && pe.read_inode.zero?
  end

  begin
    tw = setup_watcher(target_info, pe)
  rescue WatcherSetupError => e
    log.warn "Skip #{path} because unexpected setup error happens: #{e}"
    return
  end

  @tails[path] = tw
  tw.on_notify
end

#convert_line_to_event(line, es, tail_watcher) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 661

def convert_line_to_event(line, es, tail_watcher)
  begin
    line.chomp!  # remove \n
    @parser.parse(line) { |time, record|
      if time && record
        record[@path_key] ||= tail_watcher.path unless @path_key.nil?
        es.add(time, record)
      else
        if @emit_unmatched_lines
          record = {'unmatched_line' => line}
          record[@path_key] ||= tail_watcher.path unless @path_key.nil?
          es.add(Fluent::EventTime.now, record)
        end
        log.warn "pattern not matched: #{line.inspect}"
      end
    }
  rescue => e
    log.warn 'invalid line found', file: tail_watcher.path, line: line, error: e.to_s
    log.debug_backtrace(e.backtrace)
  end
end
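
To make the two branches above concrete, these are the record shapes added to the event stream, assuming @path_key is set to "filepath" and a parser that produces a "message" field (all values are hypothetical):

# Matched line: time and record come from the parser, plus the path field.
matched_record   = { 'message' => 'GET /health 200',
                     'filepath' => '/var/log/app/access.log' }

# Unmatched line, only when emit_unmatched_lines is enabled; the event time is EventTime.now.
unmatched_record = { 'unmatched_line' => '!! partial write',
                     'filepath' => '/var/log/app/access.log' }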

#detach_watcher(tw, ino, close_io = true) ⇒ Object

TailWatcher#close is called by another thread during the shutdown phase, which can cause a "can't modify string; temporarily locked" error in IOHandler, so the close_io argument was added to avoid this problem. At shutdown, IOHandler's io is released automatically after the event loop is detached.



# File 'lib/fluent/plugin/in_tail.rb', line 555

def detach_watcher(tw, ino, close_io = true)
  if @follow_inodes && tw.ino != ino
    log.warn("detach_watcher could be detaching an unexpected tail_watcher with a different ino.",
              path: tw.path, actual_ino_in_tw: tw.ino, expect_ino_to_close: ino)
  end
  tw.watchers.each do |watcher|
    event_loop_detach(watcher)
  end
  tw.detach(@shutdown_start_time)

  tw.close if close_io

  if tw.unwatched && @pf
    target_info = TargetInfo.new(tw.path, ino)
    @pf.unwatch(target_info)
  end
end
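
Putting the close_io note together with #shutdown and #close elsewhere on this page, the intended ordering is roughly as follows (a commented outline, not executable plugin code):

# 1. shutdown: stop watching immediately but keep IO handles open
#      stop_watchers(existence_path, immediate: true, remove_watcher: false)
#      -> detach_watcher(tw, ino, false)     # close_io = false
# 2. close: runs after all threads have stopped (thread plugin helper)
#      close_watcher_handles                 # TailWatcher#close is now safe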

#detach_watcher_after_rotate_wait(tw, ino) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 579

def detach_watcher_after_rotate_wait(tw, ino)
  # Call event_loop_attach/event_loop_detach is high-cost for short-live object.
  # If this has a problem with large number of files, use @_event_loop directly instead of timer_execute.
  if @open_on_every_update
    # Detach now because it's already closed; waiting for it doesn't make sense.
    detach_watcher(tw, ino)
  elsif throttling_is_enabled?(tw)
    # When the throttling feature is enabled, it might not reach EOF yet.
    # We should ensure all contents are read before closing it, while keeping throttling.
    start_time_to_wait = Fluent::Clock.now
    timer = timer_execute(:in_tail_close_watcher, 1, repeat: true) do
      elapsed = Fluent::Clock.now - start_time_to_wait
      if tw.eof? && elapsed >= @rotate_wait
        timer.detach
        detach_watcher(tw, ino)
      end
    end
  else
    # when the throttling feature isn't enabled, just wait @rotate_wait
    timer_execute(:in_tail_close_watcher, @rotate_wait, repeat: false) do
      detach_watcher(tw, ino)
    end
  end
end

#existence_path ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 353

def existence_path
  hash = {}
  @tails.each {|path, tw|
    if @follow_inodes
      hash[tw.ino] = TargetInfo.new(tw.path, tw.ino)
    else
      hash[tw.path] = TargetInfo.new(tw.path, tw.ino)
    end
  }
  hash
end

#expand_paths ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 286

def expand_paths
  date = Fluent::EventTime.now
  paths = []
  @paths.each { |path|
    path = if @path_timezone
             @path_formatters[path].call(date)
           else
             date.to_time.strftime(path)
           end
    if path.include?('*')
      paths += Dir.glob(path).select { |p|
        begin
          is_file = !File.directory?(p)
          if (File.readable?(p) || have_read_capability?) && is_file
            if @limit_recently_modified && File.mtime(p) < (date.to_time - @limit_recently_modified)
              false
            else
              true
            end
          else
            if is_file
              unless @ignore_list.include?(p)
                log.warn "#{p} unreadable. It is excluded and would be examined next time."
                @ignore_list << p if @ignore_repeated_permission_error
              end
            end
            false
          end
        rescue Errno::ENOENT, Errno::EACCES
          log.debug("#{p} is missing after refresh file list")
          false
        end
      }
    else
      # When the file is not created yet, Dir.glob returns an empty array, so just add the path when it is static.
      paths << path
    end
  }
  excluded = @exclude_path.map { |path|
    path = if @path_timezone
             @exclude_path_formatters[path].call(date)
           else
             date.to_time.strftime(path)
           end
    path.include?('*') ? Dir.glob(path) : path
  }.flatten.uniq
  # filter out non existing files, so in case pattern is without '*' we don't do unnecessary work
  hash = {}
  (paths - excluded).select { |path|
    FileTest.exist?(path)
  }.each { |path|
    # Even though we just checked for existence, there is a race condition here
    # in which stat() might fail with ENOENT. See #3224.
    begin
      target_info = TargetInfo.new(path, Fluent::FileWrapper.stat(path).ino)
      if @follow_inodes
        hash[target_info.ino] = target_info
      else
        hash[target_info.path] = target_info
      end
    rescue Errno::ENOENT, Errno::EACCES  => e
      $log.warn "expand_paths: stat() for #{path} failed with #{e.class.name}. Skip file."
    end
  }
  hash
end
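
The expansion is two steps: format the configured path with the current date, then glob it if it still contains '*'. A standalone sketch of just those steps (hypothetical path and date; the readability, mtime, and exclude_path filtering above is omitted):

path = '/var/log/app/%Y-%m-%d/*.log'        # hypothetical configured path
date = Time.new(2024, 5, 1)

expanded   = date.strftime(path)            # => "/var/log/app/2024-05-01/*.log"
candidates = expanded.include?('*') ? Dir.glob(expanded) : [expanded]
candidates.each { |p| puts p }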

#flush_buffer(tw, buf) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 604

def flush_buffer(tw, buf)
  buf.chomp!
  @parser.parse(buf) { |time, record|
    if time && record
      tag = if @tag_prefix || @tag_suffix
              @tag_prefix + tw.tag + @tag_suffix
            else
              @tag
            end
      record[@path_key] ||= tw.path unless @path_key.nil?
      router.emit(tag, time, record)
    else
      if @emit_unmatched_lines
        record = { 'unmatched_line' => buf }
        record[@path_key] ||= tail_watcher.path unless @path_key.nil?
        tag = if @tag_prefix || @tag_suffix
                @tag_prefix + tw.tag + @tag_suffix
              else
                @tag
              end
        router.emit(tag, Fluent::EventTime.now, record)
      end
      log.warn "got incomplete line at shutdown from #{tw.path}: #{buf.inspect}"
    end
  }
end

#have_read_capability? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/fluent/plugin/in_tail.rb', line 281

def have_read_capability?
  @capability.have_capability?(:effective, :dac_read_search) ||
    @capability.have_capability?(:effective, :dac_override)
end

#parse_encoding_param(encoding_name) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 228

def parse_encoding_param(encoding_name)
  begin
    Encoding.find(encoding_name) if encoding_name
  rescue ArgumentError => e
    raise Fluent::ConfigError, e.message
  end
end

#parse_multilines(lines, tail_watcher) ⇒ Object

There is no need to check whether line_buffer_timer_flusher is nil, because it is always set when this handler is used (multiline mode).



# File 'lib/fluent/plugin/in_tail.rb', line 692

def parse_multilines(lines, tail_watcher)
  lb = tail_watcher.line_buffer_timer_flusher.line_buffer
  es = Fluent::MultiEventStream.new
  if @parser.has_firstline?
    tail_watcher.line_buffer_timer_flusher.reset_timer
    lines.each { |line|
      if @parser.firstline?(line)
        if lb
          convert_line_to_event(lb, es, tail_watcher)
        end
        lb = line
      else
        if lb.nil?
          if @emit_unmatched_lines
            convert_line_to_event(line, es, tail_watcher)
          end
          log.warn "got incomplete line before first line from #{tail_watcher.path}: #{line.inspect}"
        else
          lb << line
        end
      end
    }
  else
    lb ||= ''
    lines.each do |line|
      lb << line
      @parser.parse(lb) { |time, record|
        if time && record
          convert_line_to_event(lb, es, tail_watcher)
          lb = ''
        end
      }
    end
  end
  tail_watcher.line_buffer_timer_flusher.line_buffer = lb
  es
end
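
A self-contained illustration of the firstline-based grouping done above, with a plain regexp standing in for the configured multiline parser (line_buffer_timer_flusher and unmatched-line handling are omitted):

firstline = /\A\d{4}-\d{2}-\d{2}/           # stands in for format_firstline
lines = [
  "2024-05-01 10:00:00 ERROR boom\n",
  "  at foo.rb:10\n",
  "  at bar.rb:22\n",
  "2024-05-01 10:00:01 INFO ok\n",
]

events = []
buffer = nil
lines.each do |line|
  if line =~ firstline
    events << buffer if buffer              # flush the previous grouped event
    buffer = line.dup
  elsif buffer
    buffer << line                          # continuation line: append to the buffer
  end
end
events << buffer if buffer
events.each { |e| puts e.inspect }          # two grouped events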

#parse_singleline(lines, tail_watcher) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 683

def parse_singleline(lines, tail_watcher)
  es = Fluent::MultiEventStream.new
  lines.each { |line|
    convert_line_to_event(line, es, tail_watcher)
  }
  es
end

#receive_lines(lines, tail_watcher) ⇒ Object

Returns true if the emit succeeds or only an unrecoverable error occurs; returns false if a BufferOverflowError is raised.

Returns:

  • true if no error or only an unrecoverable error happens during the emit action; false if a BufferOverflowError is raised



# File 'lib/fluent/plugin/in_tail.rb', line 632

def receive_lines(lines, tail_watcher)
  lines = lines.reject do |line|
    skip_line = @max_line_size ? line.bytesize > @max_line_size : false
    if skip_line
      log.warn "received line length is longer than #{@max_line_size}"
      log.debug "skipped line: #{line.chomp}"
    end
    skip_line
  end
  es = @receive_handler.call(lines, tail_watcher)
  unless es.empty?
    tag = if @tag_prefix || @tag_suffix
            @tag_prefix + tail_watcher.tag + @tag_suffix
          else
            @tag
          end
    begin
      router.emit_stream(tag, es)
    rescue Fluent::Plugin::Buffer::BufferOverflowError
      return false
    rescue
      # ignore non BufferQueueLimitError errors because in_tail can't recover. Engine shows logs and backtraces.
      return true
    end
  end

  return true
end
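
The boolean return value is how back-pressure is signalled to the reader. A hedged sketch of how a caller could react to it; advance_position and wait_and_retry are hypothetical helpers, not part of this class:

if receive_lines(lines, tail_watcher)
  advance_position(tail_watcher)   # hypothetical: safe to move the position entry forward
else
  # BufferOverflowError: do not advance; the same lines should be read again later.
  wait_and_retry(tail_watcher)     # hypothetical back-off helper
end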

#refresh_watchers ⇒ Object

in_tail with a '*' path does not check rotated-file equality during the refresh phase, so you should not use a '*' path when your logs are rotated by another tool: it causes log duplication once the watched files are updated. In that case, keep the rotated logs in a separate directory and specify both locations in the path parameter, e.g. path /path/to/dir/*,/path/to/rotated_logs/target_file



# File 'lib/fluent/plugin/in_tail.rb', line 370

def refresh_watchers
  target_paths_hash = expand_paths
  existence_paths_hash = existence_path

  log.debug {
    target_paths_str = target_paths_hash.collect { |key, target_info| target_info.path }.join(",")
    existence_paths_str = existence_paths_hash.collect { |key, target_info| target_info.path }.join(",")
    "tailing paths: target = #{target_paths_str} | existing = #{existence_paths_str}"
  }

  if !@follow_inodes
    need_unwatch_in_stop_watchers = true
  else
    # When using @follow_inodes, need this to unwatch the rotated old inode when it disappears.
    # After `update_watcher` detaches an old TailWatcher, the inode is lost from the `@tails`.
    # So that inode can't be contained in `removed_hash`, and can't be unwatched by `stop_watchers`.
    #
    # This logic may work for `@follow_inodes false` too.
    # Just limiting the case to suppress the impact on existing logic.
    @pf&.unwatch_removed_targets(target_paths_hash)
    need_unwatch_in_stop_watchers = false
  end

  removed_hash = existence_paths_hash.reject {|key, value| target_paths_hash.key?(key)}
  added_hash = target_paths_hash.reject {|key, value| existence_paths_hash.key?(key)}

  stop_watchers(removed_hash, unwatched: need_unwatch_in_stop_watchers) unless removed_hash.empty?
  start_watchers(added_hash) unless added_hash.empty?
  @startup = false if @startup
end
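
As a concrete form of the workaround in the note above, list the live directory and the externally rotated file explicitly instead of covering both with one '*' glob (paths as given in the note; the other parameters are illustrative):

<source>
  @type tail
  path /path/to/dir/*,/path/to/rotated_logs/target_file
  pos_file /var/log/fluentd/app.log.pos
  tag app.logs
  <parse>
    @type none
  </parse>
</source>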

#setup_watcher(target_info, pe) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 401

def setup_watcher(target_info, pe)
  line_buffer_timer_flusher = @multiline_mode ? TailWatcher::LineBufferTimerFlusher.new(log, @multiline_flush_interval, &method(:flush_buffer)) : nil
  read_from_head = !@startup || @read_from_head
  tw = TailWatcher.new(target_info, pe, log, read_from_head, @follow_inodes, method(:update_watcher), line_buffer_timer_flusher, method(:io_handler), @metrics)

  if @enable_watch_timer
    tt = TimerTrigger.new(1, log) { tw.on_notify }
    tw.register_watcher(tt)
  end

  if @enable_stat_watcher
    tt = StatWatcher.new(target_info.path, log) { tw.on_notify }
    tw.register_watcher(tt)
  end

  tw.watchers.each do |watcher|
    event_loop_attach(watcher)
  end

  tw.group_watcher = add_path_to_group_watcher(target_info.path)

  tw
rescue => e
  if tw
    tw.watchers.each do |watcher|
      event_loop_detach(watcher)
    end

    tw.detach(@shutdown_start_time)
    tw.close
  end
  raise e
end

#shutdown ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 266

def shutdown
  @shutdown_start_time = Fluent::Clock.now
  # during shutdown phase, don't close io. It should be done in close after all threads are stopped. See close.
  stop_watchers(existence_path, immediate: true, remove_watcher: false)
  @pf_file.close if @pf_file

  super
end

#start ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 236

def start
  super

  if @pos_file
    pos_file_dir = File.dirname(@pos_file)
    FileUtils.mkdir_p(pos_file_dir, mode: @dir_perm) unless Dir.exist?(pos_file_dir)
    @pf_file = File.open(@pos_file, File::RDWR|File::CREAT|File::BINARY, @file_perm)
    @pf_file.sync = true
    @pf = PositionFile.load(@pf_file, @follow_inodes, expand_paths, logger: log)

    if @pos_file_compaction_interval
      timer_execute(:in_tail_refresh_compact_pos_file, @pos_file_compaction_interval) do
        log.info('Clean up the pos file')
        @pf.try_compact
      end
    end
  end

  refresh_watchers unless @skip_refresh_on_startup
  timer_execute(:in_tail_refresh_watchers, @refresh_interval, &method(:refresh_watchers))
end

#start_watchers(targets_info) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 463

def start_watchers(targets_info)
  targets_info.each_value {|target_info|
    construct_watcher(target_info)
    break if before_shutdown?
  }
end

#statistics ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 730

def statistics
  stats = super

  stats = {
    'input' => stats["input"].merge({
      'opened_file_count' => @metrics.opened.get,
      'closed_file_count' => @metrics.closed.get,
      'rotated_file_count' => @metrics.rotated.get,
    })
  }
  stats
end
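
For reference, the returned hash has this shape; the counters come from the metrics created in #configure, and the remaining 'input' fields are inherited from Input#statistics (all numbers below are illustrative):

{
  'input' => {
    # ...fields inherited from Input#statistics (e.g. emit_records)...
    'opened_file_count'  => 12,
    'closed_file_count'  => 3,
    'rotated_file_count' => 2,
  }
}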

#stop ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 258

def stop
  if @variable_store
    @variable_store.delete(@pos_file)
  end

  super
end

#stop_watchers(targets_info, immediate: false, unwatched: false, remove_watcher: true) ⇒ Object



# File 'lib/fluent/plugin/in_tail.rb', line 470

def stop_watchers(targets_info, immediate: false, unwatched: false, remove_watcher: true)
  targets_info.each_value { |target_info|
    remove_path_from_group_watcher(target_info.path)

    if remove_watcher
      tw = @tails.delete(target_info.path)
    else
      tw = @tails[target_info.path]
    end
    if tw
      tw.unwatched = unwatched
      if immediate
        detach_watcher(tw, target_info.ino, false)
      else
        detach_watcher_after_rotate_wait(tw, target_info.ino)
      end
    end
  }
end

#throttling_is_enabled?(tw) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/fluent/plugin/in_tail.rb', line 573

def throttling_is_enabled?(tw)
  return true if @read_bytes_limit_per_second > 0
  return true if tw.group_watcher && tw.group_watcher.limit >= 0
  false
end

#update_watcher(tail_watcher, pe, new_inode) ⇒ Object

refresh_watchers calls @tails.keys, so for safety we do not use a stop_watcher -> start_watcher sequence here.



# File 'lib/fluent/plugin/in_tail.rb', line 500

def update_watcher(tail_watcher, pe, new_inode)
  # TODO we should use another callback for this.
  # To suppress the impact on existing logic, limit this case to `@follow_inodes`.
  # We may not need `@follow_inodes` condition.
  if @follow_inodes && new_inode.nil?
    # nil inode means the file disappeared, so we only need to stop it.
    @tails.delete(tail_watcher.path)
    # https://github.com/fluent/fluentd/pull/4237#issuecomment-1633358632 
    # Because of this problem, log duplication can occur during `rotate_wait`.
    # Need to set `rotate_wait 0` for a workaround.
    # Duplication will occur if `refresh_watcher` is called during the `rotate_wait`.
    # In that case, `refresh_watcher` will add the new TailWatcher to tail the same target,
    # and it causes the log duplication.
    # (Other `detach_watcher_after_rotate_wait` may have the same problem.
    #  We need a mechanism that avoids adding a duplicated TailWatcher while detaching the old TailWatcher.)
    detach_watcher_after_rotate_wait(tail_watcher, pe.read_inode)
    return
  end

  path = tail_watcher.path

  log.info("detected rotation of #{path}; waiting #{@rotate_wait} seconds")

  if @pf
    pe_inode = pe.read_inode
    target_info_from_position_entry = TargetInfo.new(path, pe_inode)
    unless pe_inode == @pf[target_info_from_position_entry].read_inode
      log.warn "Skip update_watcher because watcher has been already updated by other inotify event",
               path: path, inode: pe.read_inode, inode_in_pos_file: @pf[target_info_from_position_entry].read_inode
      return
    end
  end

  new_target_info = TargetInfo.new(path, new_inode)

  if @follow_inodes
    new_position_entry = @pf[new_target_info]
    # If `refresh_watcher` found the new file earlier, this will not be zero.
    # In this case, all we have to do is detach the current tail_watcher.
    if new_position_entry.read_inode == 0
      @tails[path] = setup_watcher(new_target_info, new_position_entry)
      @tails[path].on_notify
    end
  else
    @tails[path] = setup_watcher(new_target_info, pe)
    @tails[path].on_notify
  end

  detach_watcher_after_rotate_wait(tail_watcher, pe.read_inode)
end