Class: Fluent::CloudwatchLogsOutput
- Inherits:
-
BufferedOutput
- Object
- BufferedOutput
- Fluent::CloudwatchLogsOutput
- Includes:
- Mixin::ConfigPlaceholders, SetTimeKeyMixin
- Defined in:
- lib/fluent/plugin/out_cloudwatch_logs.rb
Constant Summary collapse
- MAX_EVENTS_SIZE =
1_048_576
- MAX_EVENT_SIZE =
256 * 1024
- EVENT_HEADER_SIZE =
26
Instance Method Summary collapse
- #configure(conf) ⇒ Object
- #format(tag, time, record) ⇒ Object
-
#initialize ⇒ CloudwatchLogsOutput
constructor
A new instance of CloudwatchLogsOutput.
- #placeholders ⇒ Object
- #start ⇒ Object
- #write(chunk) ⇒ Object
Constructor Details
#initialize ⇒ CloudwatchLogsOutput
Returns a new instance of CloudwatchLogsOutput.
42 43 44 45 46 |
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 42

# Instantiates the output plugin and lazily pulls in the AWS SDK so the
# gem is only loaded when this plugin is actually used.
def initialize
  super
  require 'aws-sdk-cloudwatchlogs'
end
Instance Method Details
#configure(conf) ⇒ Object
52 53 54 55 56 57 58 59 60 61 62 |
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 52

# Validates the plugin configuration: exactly one source for the log group
# name and exactly one source for the log stream name must be set.
#
# Raises ConfigError when zero or multiple options of a group are given.
def configure(conf)
  super

  group_sources  = [conf['log_group_name'], conf['use_tag_as_group'], conf['log_group_name_key']]
  stream_sources = [conf['log_stream_name'], conf['use_tag_as_stream'], conf['log_stream_name_key']]

  unless group_sources.compact.size == 1
    raise ConfigError, "Set only one of log_group_name, use_tag_as_group and log_group_name_key"
  end

  unless stream_sources.compact.size == 1
    raise ConfigError, "Set only one of log_stream_name, use_tag_as_stream and log_stream_name_key"
  end
end
#format(tag, time, record) ⇒ Object
76 77 78 |
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 76

# Serializes one event as a msgpack-encoded [tag, time, record] triple for
# buffering; #write unpacks the same triple via msgpack_each.
def format(tag, time, record)
  entry = [tag, time, record]
  entry.to_msgpack
end
#placeholders ⇒ Object
48 49 50 |
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 48

# Declares the placeholder styles supported by Mixin::ConfigPlaceholders
# (only %-style placeholders are enabled).
def placeholders
  %i[percent]
end
#start ⇒ Object
64 65 66 67 68 69 70 71 72 73 74 |
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 64

# Builds the CloudWatch Logs client from the configured credentials,
# region and HTTP proxy, and initializes per-stream sequence-token state.
#
# NOTE(review): the rendered source had the local variable name stripped
# by extraction ("= {}", "Client.new()"); restored here as `options`.
def start
  super

  # Only pass options the user actually configured; the SDK falls back to
  # its default credential/region resolution chain otherwise.
  options = {}
  options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
  options[:region] = @region if @region
  options[:http_proxy] = @http_proxy if @http_proxy
  @logs ||= Aws::CloudWatchLogs::Client.new(options)

  # Cache of PutLogEvents sequence tokens, guarded by a mutex because
  # #write uploads from @concurrency worker threads.
  @sequence_tokens = {}
  @store_next_sequence_token_mutex = Mutex.new
end
#write(chunk) ⇒ Object
80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 |
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 80

# Flushes a buffer chunk to CloudWatch Logs: groups events by
# (log group, log stream), optionally auto-creates missing groups/streams,
# renders each record to a message string, sorts events chronologically,
# and uploads them from @concurrency worker threads.
#
# NOTE(review): the rendered source had the `message` local stripped by
# extraction ("else = Yajl.dump(record)", "message: }"); restored below.
def write(chunk)
  queue = Thread::Queue.new

  chunk.enum_for(:msgpack_each).select {|tag, time, record|
    # Drop nil records up front so the grouping block can assume a Hash.
    if record.nil?
      log.warn "record is nil (tag=#{tag})"
      false
    else
      true
    end
  }.group_by {|tag, time, record|
    # Resolve the destination log group: tag, a record key (optionally
    # removed from the record), or the static config value.
    group = case
            when @use_tag_as_group
              tag
            when @log_group_name_key
              if @remove_log_group_name_key
                record.delete(@log_group_name_key)
              else
                record[@log_group_name_key]
              end
            else
              @log_group_name
            end
    # Resolve the destination log stream the same way.
    stream = case
             when @use_tag_as_stream
               tag
             when @log_stream_name_key
               if @remove_log_stream_name_key
                 record.delete(@log_stream_name_key)
               else
                 record[@log_stream_name_key]
               end
             else
               @log_stream_name
             end
    [group, stream]
  }.each {|group_stream, rs|
    group_name, stream_name = group_stream

    if stream_name.nil?
      log.warn "stream_name is nil (group_name=#{group_name})"
      next
    end

    unless log_group_exists?(group_name)
      if @auto_create_stream
        create_log_group(group_name)
      else
        log.warn "Log group '#{group_name}' does not exist"
        next
      end
    end

    unless log_stream_exists?(group_name, stream_name)
      if @auto_create_stream
        create_log_stream(group_name, stream_name)
      else
        log.warn "Log stream '#{stream_name}' does not exist"
        next
      end
    end

    events = []
    rs.each do |t, time, record|
      # CloudWatch expects millisecond timestamps; fluentd times are seconds.
      time_ms = time * 1000
      scrub_record!(record)
      if @message_keys
        message = @message_keys.split(',').map {|k| record[k].to_s }.join(' ')
      else
        message = Yajl.dump(record)
      end

      if @max_message_length
        message = message.slice(0, @max_message_length)
      end

      events << {timestamp: time_ms, message: message}
    end
    # The log events in the batch must be in chronological order by their timestamp.
    # http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
    events = events.sort_by {|e| e[:timestamp] }
    queue << [group_name, stream_name, events]
  }

  # One nil sentinel per worker thread terminates its shift loop.
  @concurrency.times do
    queue << nil
  end
  threads = @concurrency.times.map do |i|
    Thread.start do
      while job = queue.shift
        group_name, stream_name, events = job
        put_events_by_chunk(group_name, stream_name, events)
      end
    end
  end
  threads.each(&:join)
end