Class: Fluent::Plugin::CloudwatchLogsOutput
- Inherits: Fluent::Plugin::Output
  (ancestry: Object > Fluent::Plugin::Output > Fluent::Plugin::CloudwatchLogsOutput)
- Defined in: lib/fluent/plugin/out_cloudwatch_logs.rb
Defined Under Namespace
Classes: TooLargeEventError
Constant Summary
- DEFAULT_BUFFER_TYPE = "memory"
- MAX_EVENTS_SIZE = 1_048_576
- MAX_EVENT_SIZE = 256 * 1024
- EVENT_HEADER_SIZE = 26
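These values track the documented PutLogEvents limits: at most 1 MiB of payload per batch, at most 256 KiB per event, and a fixed 26-byte accounting overhead per event. A minimal sketch of how a sender might pack events under these limits; the batches helper below is illustrative, not part of this plugin, and it ignores the API's separate cap on events per batch:

    # Illustrative only: split events into payload-size-bounded batches.
    MAX_EVENTS_SIZE = 1_048_576   # max bytes of payload per PutLogEvents call
    EVENT_HEADER_SIZE = 26        # fixed per-event overhead counted by CloudWatch

    def batches(events)
      chunks, size = [[]], 0
      events.each do |event|
        bytes = event[:message].bytesize + EVENT_HEADER_SIZE
        if size + bytes > MAX_EVENTS_SIZE && !chunks.last.empty?
          chunks << []
          size = 0
        end
        chunks.last << event
        size += bytes
      end
      chunks
    end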
Instance Method Summary
- #configure(conf) ⇒ Object
- #format(tag, time, record) ⇒ Object
- #formatted_to_msgpack_binary? ⇒ Boolean
- #initialize ⇒ CloudwatchLogsOutput (constructor): A new instance of CloudwatchLogsOutput.
- #multi_workers_ready? ⇒ Boolean
- #start ⇒ Object
- #write(chunk) ⇒ Object
Constructor Details
#initialize ⇒ CloudwatchLogsOutput
Returns a new instance of CloudwatchLogsOutput.
    # File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 74

    def initialize
      super
      require 'aws-sdk-cloudwatchlogs'
    end
Instance Method Details
#configure(conf) ⇒ Object
    # File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 80

    def configure(conf)
      compat_parameters_convert(conf, :buffer, :inject)
      super

      unless [conf['log_group_name'], conf['use_tag_as_group'], conf['log_group_name_key']].compact.size == 1
        raise Fluent::ConfigError, "Set only one of log_group_name, use_tag_as_group and log_group_name_key"
      end

      unless [conf['log_stream_name'], conf['use_tag_as_stream'], conf['log_stream_name_key']].compact.size == 1
        raise Fluent::ConfigError, "Set only one of log_stream_name, use_tag_as_stream and log_stream_name_key"
      end

      if [conf['log_group_aws_tags'], conf['log_group_aws_tags_key']].compact.size > 1
        raise ConfigError, "Set only one of log_group_aws_tags, log_group_aws_tags_key"
      end

      if [conf['retention_in_days'], conf['retention_in_days_key']].compact.size > 1
        raise ConfigError, "Set only one of retention_in_days, retention_in_days_key"
      end

      formatter_conf = conf.elements('format').first
      @formatter_proc = unless formatter_conf
                          unless @message_keys.empty?
                            Proc.new {|tag, time, record|
                              @message_keys.map {|k| record[k].to_s }.reject {|e| e.empty? }.join(' ')
                            }
                          else
                            Proc.new {|tag, time, record|
                              @json_handler.dump(record)
                            }
                          end
                        else
                          formatter = formatter_create(usage: 'cloudwatch-logs-plugin', conf: formatter_conf)
                          formatter.method(:format)
                        end
    end
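Absent a format section, the default formatter either space-joins the values of the configured message_keys or serializes the whole record with the JSON handler. The message_keys branch in isolation (sample data is made up):

    # The message_keys branch in isolation; sample data is made up.
    message_keys = ['level', 'message', 'trace_id']
    record = { 'level' => 'warn', 'message' => 'disk low' }

    message_keys.map {|k| record[k].to_s }.reject {|e| e.empty? }.join(' ')
    # => "warn disk low"   ('trace_id' is absent, so its empty string is dropped)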
#format(tag, time, record) ⇒ Object
    # File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 178

    def format(tag, time, record)
      record = inject_values_to_record(tag, time, record)
      Fluent::MessagePackFactory.msgpack_packer.pack([tag, time, record]).to_s
    end
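Each buffered entry is thus a MessagePack-encoded [tag, time, record] triple, which #write later reads back via msgpack_each. A quick round-trip using the msgpack gem directly (sample values are illustrative):

    require 'msgpack'

    # Round-trip of a [tag, time, record] triple; sample values are illustrative.
    packed = MessagePack.pack(['app.logs', 1_700_000_000, { 'message' => 'hello' }])
    MessagePack.unpack(packed)
    # => ["app.logs", 1700000000, {"message"=>"hello"}]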
#formatted_to_msgpack_binary? ⇒ Boolean
    # File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 183

    def formatted_to_msgpack_binary?
      true
    end
#multi_workers_ready? ⇒ Boolean
    # File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 187

    def multi_workers_ready?
      true
    end
#start ⇒ Object
    # File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 117

    def start
      super

      options = {}
      options[:logger] = log if log
      options[:log_level] = :debug if log
      options[:region] = @region if @region
      options[:endpoint] = @endpoint if @endpoint
      options[:ssl_verify_peer] = @ssl_verify_peer
      options[:instance_profile_credentials_retries] = @aws_instance_profile_credentials_retries if @aws_instance_profile_credentials_retries

      if @aws_use_sts
        Aws.config[:region] = options[:region]
        credentials_options = {
          role_arn: @aws_sts_role_arn,
          role_session_name: @aws_sts_session_name,
          external_id: @aws_sts_external_id,
          policy: @aws_sts_policy,
          duration_seconds: @aws_sts_duration_seconds
        }
        credentials_options[:sts_endpoint_url] = @aws_sts_endpoint_url if @aws_sts_endpoint_url
        if @region and @aws_sts_endpoint_url
          credentials_options[:client] = Aws::STS::Client.new(:region => @region, endpoint: @aws_sts_endpoint_url)
        elsif @region
          credentials_options[:client] = Aws::STS::Client.new(:region => @region)
        end
        options[:credentials] = Aws::AssumeRoleCredentials.new(credentials_options)
      elsif @web_identity_credentials
        c = @web_identity_credentials
        credentials_options = {}
        credentials_options[:role_arn] = c.role_arn
        credentials_options[:role_session_name] = c.role_session_name
        credentials_options[:web_identity_token_file] = c.web_identity_token_file
        credentials_options[:policy] = c.policy if c.policy
        credentials_options[:duration_seconds] = c.duration_seconds if c.duration_seconds
        if @region
          credentials_options[:client] = Aws::STS::Client.new(:region => @region)
        end
        options[:credentials] = Aws::AssumeRoleWebIdentityCredentials.new(credentials_options)
      elsif @aws_ecs_authentication
        # Collect AWS credentials from the ECS relative URI environment variable
        aws_container_credentials_relative_uri = ENV["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"]
        options[:credentials] = Aws::ECSCredentials.new({credential_path: aws_container_credentials_relative_uri}).credentials
      else
        options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
      end
      options[:http_proxy] = @http_proxy if @http_proxy
      @logs ||= Aws::CloudWatchLogs::Client.new(options)
      @sequence_tokens = {}
      @store_next_sequence_token_mutex = Mutex.new

      log.debug "Aws::CloudWatchLogs::Client initialized: log.level #{log.level} => #{options[:log_level]}"

      @json_handler = case @json_handler
                      when :yajl
                        Yajl
                      when :json
                        JSON
                      end
    end
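Every credential branch above ends in an Aws::CloudWatchLogs::Client. A minimal sketch of the simplest branch, static credentials, with placeholder region and environment-variable names:

    require 'aws-sdk-cloudwatchlogs'

    # Mirrors the plain-credentials branch above; region and env var names are placeholders.
    client = Aws::CloudWatchLogs::Client.new(
      region: 'us-east-1',
      credentials: Aws::Credentials.new(ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY'])
    )
    client.describe_log_groups(limit: 1) # cheap call to confirm the client can reach the API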
#write(chunk) ⇒ Object
    # File 'lib/fluent/plugin/out_cloudwatch_logs.rb', line 191

    def write(chunk)
      log_group_name = extract_placeholders(@log_group_name, chunk) if @log_group_name
      log_stream_name = extract_placeholders(@log_stream_name, chunk) if @log_stream_name
      aws_tags = @log_group_aws_tags.each {|k, v|
        @log_group_aws_tags[extract_placeholders(k, chunk)] = extract_placeholders(v, chunk)
      } if @log_group_aws_tags

      queue = Thread::Queue.new

      chunk.enum_for(:msgpack_each).select {|tag, time, record|
        if record.nil?
          log.warn "record is nil (tag=#{tag})"
          false
        else
          true
        end
      }.group_by {|tag, time, record|
        group = case
                when @use_tag_as_group
                  tag
                when @log_group_name_key
                  if @remove_log_group_name_key
                    record.delete(@log_group_name_key)
                  else
                    record[@log_group_name_key]
                  end
                else
                  log_group_name
                end

        stream = case
                 when @use_tag_as_stream
                   tag
                 when @log_stream_name_key
                   if @remove_log_stream_name_key
                     record.delete(@log_stream_name_key)
                   else
                     record[@log_stream_name_key]
                   end
                 else
                   log_stream_name
                 end

        [group, stream]
      }.each {|group_stream, rs|
        group_name, stream_name = group_stream

        if stream_name.nil?
          log.warn "stream_name is nil (group_name=#{group_name})"
          next
        end

        unless log_group_exists?(group_name)
          # rs = [[name, timestamp, record], [name, timestamp, record]]
          # Get tags and retention from the first record: the log group is
          # created only once, so values from the first record persist.
          record = rs[0][2]

          awstags = aws_tags
          unless @log_group_aws_tags_key.nil?
            if @remove_log_group_aws_tags_key
              awstags = record.delete(@log_group_aws_tags_key)
            else
              awstags = record[@log_group_aws_tags_key]
            end
          end

          retention_in_days = @retention_in_days
          unless @retention_in_days_key.nil?
            if @remove_retention_in_days_key
              retention_in_days = record.delete(@retention_in_days_key)
            else
              retention_in_days = record[@retention_in_days_key]
            end
          end

          if @auto_create_stream
            create_log_group(group_name, awstags, retention_in_days)
          else
            log.warn "Log group '#{group_name}' does not exist"
            next
          end
        end

        unless log_stream_exists?(group_name, stream_name)
          if @auto_create_stream
            create_log_stream(group_name, stream_name)
          else
            log.warn "Log stream '#{stream_name}' does not exist"
            next
          end
        end

        events = []
        rs.each do |t, time, record|
          if @log_group_aws_tags_key && @remove_log_group_aws_tags_key
            record.delete(@log_group_aws_tags_key)
          end

          if @retention_in_days_key && @remove_retention_in_days_key
            record.delete(@retention_in_days_key)
          end

          record = drop_empty_record(record)

          time_ms = (time.to_f * 1000).floor

          scrub_record!(record)
          message = @formatter_proc.call(t, time, record)

          if message.empty?
            log.warn "Within specified message_key(s): (#{@message_keys.join(',')}) do not have non-empty record. Skip."
            next
          end

          if @max_message_length
            message = message.slice(0, @max_message_length)
          end

          events << {timestamp: time_ms, message: message}
        end

        # The log events in the batch must be in chronological order by their timestamp.
        # http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
        events = events.sort_by {|e| e[:timestamp] }

        queue << [group_name, stream_name, events]
      }

      @concurrency.times do
        queue << nil
      end
      threads = @concurrency.times.map do |i|
        Thread.start do
          while job = queue.shift
            group_name, stream_name, events = job
            put_events_by_chunk(group_name, stream_name, events)
          end
        end
      end
      threads.each(&:join)
    end
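Per event, the transformation boils down to converting Fluentd's second-resolution time to CloudWatch millisecond timestamps and sorting before dispatch. In isolation (sample values are made up):

    # Timestamp conversion and ordering in isolation; sample values are made up.
    raw = [
      { time: 1_700_000_000.75, message: 'b' },
      { time: 1_700_000_000.25, message: 'a' },
    ]
    events = raw.map {|e| { timestamp: (e[:time].to_f * 1000).floor, message: e[:message] } }
    events.sort_by {|e| e[:timestamp] }
    # => [{:timestamp=>1700000000250, :message=>"a"}, {:timestamp=>1700000000750, :message=>"b"}]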