Class: Fluent::Plugin::CloudwatchLogsOutput
- Inherits: Output (Object → Output → Fluent::Plugin::CloudwatchLogsOutput)
- Includes: MessagePackFactory::Mixin
- Defined in: lib/fluent/plugin/out_cloudwatch_logs.rb
Constant Summary

- DEFAULT_BUFFER_TYPE = "memory"
- MAX_EVENTS_SIZE = 1_048_576
- MAX_EVENT_SIZE = 256 * 1024
- EVENT_HEADER_SIZE = 26
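These limits mirror the PutLogEvents API: a batch may not exceed 1,048,576 bytes, where each event is counted as its message size plus a fixed 26-byte header, and a single event may not exceed 256 KiB. A minimal sketch of that accounting (batch_bytes is a hypothetical helper, not part of the plugin):

MAX_EVENTS_SIZE   = 1_048_576
EVENT_HEADER_SIZE = 26

# Each event is charged its message byte size plus a fixed 26-byte header.
def batch_bytes(events)
  events.sum { |e| e[:message].bytesize + EVENT_HEADER_SIZE }
end

events = [{ timestamp: 1_700_000_000_000, message: '{"level":"info"}' }]
puts batch_bytes(events) <= MAX_EVENTS_SIZE  # => true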
Instance Method Summary
- #configure(conf) ⇒ Object
- #format(tag, time, record) ⇒ Object
- #formatted_to_msgpack_binary? ⇒ Boolean
- #initialize ⇒ CloudwatchLogsOutput (constructor): A new instance of CloudwatchLogsOutput.
- #multi_workers_ready? ⇒ Boolean
- #start ⇒ Object
- #write(chunk) ⇒ Object
Constructor Details
#initialize ⇒ CloudwatchLogsOutput
Returns a new instance of CloudwatchLogsOutput.
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 56

def initialize
  super
  require 'aws-sdk-cloudwatchlogs'
end
Instance Method Details
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 62

def configure(conf)
  compat_parameters_convert(conf, :buffer, :inject)
  super

  unless [conf['log_group_name'], conf['use_tag_as_group'], conf['log_group_name_key']].compact.size == 1
    raise Fluent::ConfigError, "Set only one of log_group_name, use_tag_as_group and log_group_name_key"
  end

  unless [conf['log_stream_name'], conf['use_tag_as_stream'], conf['log_stream_name_key']].compact.size == 1
    raise Fluent::ConfigError, "Set only one of log_stream_name, use_tag_as_stream and log_stream_name_key"
  end

  if [conf['log_group_aws_tags'], conf['log_group_aws_tags_key']].compact.size > 1
    raise Fluent::ConfigError, "Set only one of log_group_aws_tags, log_group_aws_tags_key"
  end

  if [conf['retention_in_days'], conf['retention_in_days_key']].compact.size > 1
    raise Fluent::ConfigError, "Set only one of retention_in_days, retention_in_days_key"
  end
end
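The first two checks enforce that exactly one source for the group name (and likewise the stream name) is configured. A standalone sketch of that rule (exactly_one? is a hypothetical helper, not plugin API):

def exactly_one?(*values)
  values.compact.size == 1
end

exactly_one?('my-group', nil, nil)     # => true  (log_group_name only)
exactly_one?('my-group', 'true', nil)  # => false (two options set)
exactly_one?(nil, nil, nil)            # => false (none set)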
#format(tag, time, record) ⇒ Object
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 113

def format(tag, time, record)
  record = inject_values_to_record(tag, time, record)
  msgpack_packer.pack([tag, time, record]).to_s
end
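#format packs each event as a [tag, time, record] tuple; #write later re-reads those tuples from the buffer chunk via msgpack_each. A minimal round-trip sketch using the msgpack gem directly (sample values are illustrative):

require 'msgpack'

# Pack a [tag, time, record] tuple the way #format does, then unpack it.
packed = MessagePack.pack(['app.logs', 1_700_000_000, { 'message' => 'hello' }])
tag, time, record = MessagePack.unpack(packed)
# tag    => "app.logs"
# time   => 1700000000
# record => {"message"=>"hello"}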
#formatted_to_msgpack_binary? ⇒ Boolean
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 118

def formatted_to_msgpack_binary?
  true
end
#multi_workers_ready? ⇒ Boolean
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 122

def multi_workers_ready?
  true
end
#start ⇒ Object
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 83

def start
  super

  options = {}
  options[:region] = @region if @region
  options[:endpoint] = @endpoint if @endpoint
  options[:instance_profile_credentials_retries] = @aws_instance_profile_credentials_retries if @aws_instance_profile_credentials_retries

  if @aws_use_sts
    Aws.config[:region] = options[:region]
    options[:credentials] = Aws::AssumeRoleCredentials.new(
      role_arn: @aws_sts_role_arn,
      role_session_name: @aws_sts_session_name
    )
  else
    options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
  end

  options[:http_proxy] = @http_proxy if @http_proxy
  @logs ||= Aws::CloudWatchLogs::Client.new(options)
  @sequence_tokens = {}
  @store_next_sequence_token_mutex = Mutex.new

  @json_handler = case @json_handler
                  when :yajl
                    Yajl
                  when :json
                    JSON
                  end
end
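Outside the plugin, the same client setup looks roughly like the following (a sketch assuming the aws-sdk-cloudwatchlogs gem; the region value and environment variable names are illustrative):

require 'aws-sdk-cloudwatchlogs'

# Build client options, then supply static credentials; with aws_use_sts
# the plugin swaps in Aws::AssumeRoleCredentials instead.
options = { region: 'us-east-1' }
options[:credentials] = Aws::Credentials.new(ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY'])

logs = Aws::CloudWatchLogs::Client.new(options)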
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 126

def write(chunk)
  log_group_name = extract_placeholders(@log_group_name, chunk) if @log_group_name
  log_stream_name = extract_placeholders(@log_stream_name, chunk) if @log_stream_name

  queue = Thread::Queue.new

  chunk.enum_for(:msgpack_each).select {|tag, time, record|
    if record.nil?
      log.warn "record is nil (tag=#{tag})"
      false
    else
      true
    end
  }.group_by {|tag, time, record|
    group = case
            when @use_tag_as_group
              tag
            when @log_group_name_key
              if @remove_log_group_name_key
                record.delete(@log_group_name_key)
              else
                record[@log_group_name_key]
              end
            else
              log_group_name
            end

    stream = case
             when @use_tag_as_stream
               tag
             when @log_stream_name_key
               if @remove_log_stream_name_key
                 record.delete(@log_stream_name_key)
               else
                 record[@log_stream_name_key]
               end
             else
               log_stream_name
             end

    [group, stream]
  }.each {|group_stream, rs|
    group_name, stream_name = group_stream

    if stream_name.nil?
      log.warn "stream_name is nil (group_name=#{group_name})"
      next
    end

    unless log_group_exists?(group_name)
      # rs = [[name, timestamp, record], [name, timestamp, record]]
      # Get tags and retention from the first record; as we create the
      # log group only once, values from the first record will persist.
      record = rs[0][2]

      awstags = @log_group_aws_tags
      unless @log_group_aws_tags_key.nil?
        if @remove_log_group_aws_tags_key
          awstags = record.delete(@log_group_aws_tags_key)
        else
          awstags = record[@log_group_aws_tags_key]
        end
      end

      retention_in_days = @retention_in_days
      unless @retention_in_days_key.nil?
        if @remove_retention_in_days_key
          retention_in_days = record.delete(@retention_in_days_key)
        else
          retention_in_days = record[@retention_in_days_key]
        end
      end

      if @auto_create_stream
        create_log_group(group_name, awstags, retention_in_days)
      else
        log.warn "Log group '#{group_name}' does not exist"
        next
      end
    end

    unless log_stream_exists?(group_name, stream_name)
      if @auto_create_stream
        create_log_stream(group_name, stream_name)
      else
        log.warn "Log stream '#{stream_name}' does not exist"
        next
      end
    end

    events = []
    rs.each do |t, time, record|
      time_ms = (time.to_f * 1000).floor

      scrub_record!(record)

      unless @message_keys.empty?
        message = @message_keys.map {|k| record[k].to_s }.join(' ')
      else
        message = @json_handler.dump(record)
      end

      if @max_message_length
        message = message.slice(0, @max_message_length)
      end

      events << {timestamp: time_ms, message: message}
    end

    # The log events in the batch must be in chronological order by timestamp.
    # http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
    events = events.sort_by {|e| e[:timestamp] }
    queue << [group_name, stream_name, events]
  }

  @concurrency.times do
    queue << nil
  end

  threads = @concurrency.times.map do |i|
    Thread.start do
      while job = queue.shift
        group_name, stream_name, events = job
        put_events_by_chunk(group_name, stream_name, events)
      end
    end
  end

  threads.each(&:join)
end
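The tail of #write is a plain producer-consumer pool: batches are pushed onto a Thread::Queue, one nil sentinel per worker signals shutdown, and each worker drains jobs until it shifts a nil. The pattern in isolation (a sketch; the job payloads are placeholders):

queue = Thread::Queue.new
concurrency = 4

# Enqueue [group, stream, events] jobs, then one nil sentinel per worker.
[['group-a', 'stream-1', []], ['group-b', 'stream-2', []]].each { |job| queue << job }
concurrency.times { queue << nil }

threads = concurrency.times.map do
  Thread.start do
    while (job = queue.shift)  # a nil sentinel ends the loop
      group_name, stream_name, events = job
      # the plugin calls put_events_by_chunk(group_name, stream_name, events) here
    end
  end
end
threads.each(&:join)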