Class: Fluent::KafkaOutputBuffered2
- Inherits: BufferedOutput
  - Object > BufferedOutput > Fluent::KafkaOutputBuffered2
- Defined in: lib/fluent/plugin/out_rdkafka.rb
Constant Summary
Constants included from Fluent::KafkaPluginUtil::SSLSettings
Fluent::KafkaPluginUtil::SSLSettings::DummyFormatter
Instance Method Summary
- #build_config ⇒ Object
- #configure(conf) ⇒ Object
- #emit(tag, es, chain) ⇒ Object
- #enqueue_with_retry(producer, topic, record_buf, message_key, partition, time) ⇒ Object
- #format_stream(tag, es) ⇒ Object
- #get_producer ⇒ Object
- #initialize ⇒ KafkaOutputBuffered2 (constructor): A new instance of KafkaOutputBuffered2.
- #multi_workers_ready? ⇒ Boolean
- #setup_formatter(conf) ⇒ Object
- #shutdown ⇒ Object
- #shutdown_producers ⇒ Object
- #start ⇒ Object
- #write(chunk) ⇒ Object
Methods included from Fluent::KafkaPluginUtil::SaslSettings
Methods included from Fluent::KafkaPluginUtil::SSLSettings
included, #pickup_ssl_endpoint, #read_ssl_file
Constructor Details
#initialize ⇒ KafkaOutputBuffered2
Returns a new instance of KafkaOutputBuffered2.
# File 'lib/fluent/plugin/out_rdkafka.rb', line 86

def initialize
  super
  @producers = {}
  @producers_mutex = Mutex.new
end
Instance Method Details
#build_config ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 127

def build_config
  config = {
    :"bootstrap.servers" => @brokers,
  }

  if @ssl_ca_cert && @ssl_ca_cert[0]
    ssl = true
    config[:"ssl.ca.location"] = @ssl_ca_cert[0]
    config[:"ssl.certificate.location"] = @ssl_client_cert if @ssl_client_cert
    config[:"ssl.key.location"] = @ssl_client_cert_key if @ssl_client_cert_key
    config[:"ssl.key.password"] = @ssl_client_cert_key_password if @ssl_client_cert_key_password
  end

  if @principal
    sasl = true
    config[:"sasl.mechanisms"] = "GSSAPI"
    config[:"sasl.kerberos.principal"] = @principal
    config[:"sasl.kerberos.service.name"] = @service_name if @service_name
    config[:"sasl.kerberos.keytab"] = @keytab if @keytab
  end

  if ssl && sasl
    security_protocol = "SASL_SSL"
  elsif ssl && !sasl
    security_protocol = "SSL"
  elsif !ssl && sasl
    security_protocol = "SASL_PLAINTEXT"
  else
    security_protocol = "PLAINTEXT"
  end
  config[:"security.protocol"] = security_protocol

  config[:"compression.codec"] = @compression_codec if @compression_codec
  config[:"message.send.max.retries"] = @max_send_retries if @max_send_retries
  config[:"request.required.acks"] = @required_acks if @required_acks
  config[:"request.timeout.ms"] = @ack_timeout * 1000 if @ack_timeout
  config[:"queue.buffering.max.ms"] = @rdkafka_buffering_max_ms if @rdkafka_buffering_max_ms
  config[:"queue.buffering.max.messages"] = @rdkafka_buffering_max_messages if @rdkafka_buffering_max_messages
  config[:"message.max.bytes"] = @rdkafka_message_max_bytes if @rdkafka_message_max_bytes
  config[:"batch.num.messages"] = @rdkafka_message_max_num if @rdkafka_message_max_num

  @rdkafka_options.each { |k, v|
    config[k.to_sym] = v
  }

  config
end
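As a quick illustration (values are hypothetical, not taken from the plugin or its tests), a TLS-only setup with a CA certificate and no Kerberos principal would produce roughly this hash, with security.protocol resolved through the ssl && !sasl branch:

# Hypothetical inputs: brokers "broker1:9093", ssl_ca_cert ["/etc/ssl/ca.pem"],
# compression_codec "snappy"; no SASL principal, so sasl stays falsy.
config = {
  :"bootstrap.servers" => "broker1:9093",
  :"ssl.ca.location"   => "/etc/ssl/ca.pem",
  :"security.protocol" => "SSL",       # ssl && !sasl branch
  :"compression.codec" => "snappy",
}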
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 92

def configure(conf)
  super
  log.instance_eval {
    def add(level, message = nil)
      if message.nil?
        if block_given?
          message = yield
        else
          return
        end
      end

      # Follow rdkafka's log level. See also rdkafka-ruby's bindings.rb: https://github.com/appsignal/rdkafka-ruby/blob/e5c7261e3f2637554a5c12b924be297d7dca1328/lib/rdkafka/bindings.rb#L117
      case level
      when Logger::FATAL
        self.fatal(message)
      when Logger::ERROR
        self.error(message)
      when Logger::WARN
        self.warn(message)
      when Logger::INFO
        self.info(message)
      when Logger::DEBUG
        self.debug(message)
      else
        self.trace(message)
      end
    end
  }
  Rdkafka::Config.logger = log
  config = build_config
  @rdkafka = Rdkafka::Config.new(config)
  @formatter_proc = setup_formatter(conf)
end
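A standalone sketch (not part of the plugin) of the level mapping applied above: librdkafka logs through a Ruby Logger interface, and each Logger severity is forwarded to the matching Fluentd log method, with anything outside the listed levels falling through to trace.

require 'logger'

# Map stdlib Logger severities to the Fluentd log methods used in the case statement.
LEVEL_TO_METHOD = {
  Logger::FATAL => :fatal,
  Logger::ERROR => :error,
  Logger::WARN  => :warn,
  Logger::INFO  => :info,
  Logger::DEBUG => :debug,
}
puts LEVEL_TO_METHOD.fetch(Logger::WARN, :trace)    # => warn
puts LEVEL_TO_METHOD.fetch(Logger::UNKNOWN, :trace) # => trace (else branch)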
#emit(tag, es, chain) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 215

def emit(tag, es, chain)
  super(tag, es, chain, tag)
end
#enqueue_with_retry(producer, topic, record_buf, message_key, partition, time) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 303

def enqueue_with_retry(producer, topic, record_buf, message_key, partition, time)
  attempt = 0
  loop do
    begin
      handler = producer.produce(topic: topic, payload: record_buf, key: message_key, partition: partition, timestamp: @use_event_time ? Time.at(time) : nil)
      return handler
    rescue Exception => e
      if e.respond_to?(:code) && e.code == :queue_full
        if attempt <= @max_enqueue_retries
          log.warn "Failed to enqueue message; attempting retry #{attempt} of #{@max_enqueue_retries} after #{@enqueue_retry_backoff}s"
          sleep @enqueue_retry_backoff
          attempt += 1
        else
          raise "Failed to enqueue message although tried retry #{@max_enqueue_retries} times"
        end
      else
        raise e
      end
    end
  end
end
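Only queue_full errors are retried; each attempt sleeps enqueue_retry_backoff seconds, so attempts 0 through max_enqueue_retries all back off before the final failure raises. A minimal sketch of the worst-case blocking time, assuming hypothetical settings:

# Hypothetical values; not defaults taken from the plugin.
max_enqueue_retries   = 3
enqueue_retry_backoff = 0.1
# Attempts 0..max_enqueue_retries each sleep once before the error is raised.
worst_case_wait = (max_enqueue_retries + 1) * enqueue_retry_backoff
puts worst_case_wait  # => 0.4 (seconds)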
#format_stream(tag, es) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 219

def format_stream(tag, es)
  es.to_msgpack_stream
end
#get_producer ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 204

def get_producer
  @producers_mutex.synchronize {
    producer = @producers[Thread.current.object_id]
    unless producer
      producer = @rdkafka.producer
      @producers[Thread.current.object_id] = producer
    end
    producer
  }
end
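Producers are cached per flush thread, keyed by Thread.current.object_id, so concurrent flushes never share an Rdkafka producer instance. A standalone sketch of that caching pattern, with a plain Object standing in for the producer:

producers = {}
mutex = Mutex.new

# Each thread gets (and keeps) its own placeholder "producer".
get = lambda do
  mutex.synchronize { producers[Thread.current.object_id] ||= Object.new }
end

a = Thread.new { get.call }.value
b = Thread.new { get.call }.value
puts a.equal?(b)  # => false: two flush threads hold two distinct producers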
#multi_workers_ready? ⇒ Boolean
# File 'lib/fluent/plugin/out_rdkafka.rb', line 179

def multi_workers_ready?
  true
end
#setup_formatter(conf) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 223

def setup_formatter(conf)
  if @output_data_type == 'json'
    begin
      require 'oj'
      Oj.default_options = Fluent::DEFAULT_OJ_OPTIONS
      Proc.new { |tag, time, record| Oj.dump(record) }
    rescue LoadError
      require 'yajl'
      Proc.new { |tag, time, record| Yajl::Encoder.encode(record) }
    end
  elsif @output_data_type == 'ltsv'
    require 'ltsv'
    Proc.new { |tag, time, record| LTSV.dump(record) }
  elsif @output_data_type == 'msgpack'
    require 'msgpack'
    Proc.new { |tag, time, record| record.to_msgpack }
  elsif @output_data_type =~ /^attr:(.*)$/
    @custom_attributes = $1.split(',').map(&:strip).reject(&:empty?)
    @custom_attributes.unshift('time') if @output_include_time
    @custom_attributes.unshift('tag') if @output_include_tag
    Proc.new { |tag, time, record|
      @custom_attributes.map { |attr|
        record[attr].nil? ? '' : record[attr].to_s
      }.join(@f_separator)
    }
  else
    @formatter = Fluent::Plugin.new_formatter(@output_data_type)
    @formatter.configure(conf)
    @formatter.method(:format)
  end
end
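The object returned here is later invoked as @formatter_proc.call(tag, time, record) in #write. A sketch of what the 'attr:' branch produces, assuming hypothetical settings (output_data_type "attr:user,status", output_include_tag enabled, a tab field separator):

# After unshifting 'tag', the attribute list is ['tag', 'user', 'status'];
# 'tag' is present in the record because #write sets record['tag'] = tag.
custom_attributes = ['tag', 'user', 'status']
record = { 'tag' => 'app.access', 'user' => 'alice', 'status' => 200 }
line = custom_attributes.map { |attr| record[attr].nil? ? '' : record[attr].to_s }.join("\t")
puts line  # => "app.access<TAB>alice<TAB>200"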
#shutdown ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 183

def shutdown
  super
  shutdown_producers
end
#shutdown_producers ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 188

def shutdown_producers
  @producers_mutex.synchronize {
    shutdown_threads = @producers.map { |key, producer|
      th = Thread.new {
        unless producer.close(10)
          log.warn("Queue is forcefully closed after 10 seconds wait")
        end
      }
      th.abort_on_exception = true
      th
    }
    shutdown_threads.each { |th| th.join }
    @producers = {}
  }
end
#start ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 175

def start
  super
end
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka.rb', line 255

def write(chunk)
  tag = chunk.key
  def_topic = @default_topic || tag

  record_buf = nil
  record_buf_bytes = nil

  begin
    chunk.msgpack_each.map { |time, record|
      begin
        if @output_include_time
          if @time_format
            record['time'.freeze] = Time.at(time).strftime(@time_format)
          else
            record['time'.freeze] = time
          end
        end

        record['tag'] = tag if @output_include_tag
        topic = (@exclude_topic_key ? record.delete('topic'.freeze) : record['topic'.freeze]) || def_topic
        partition = (@exclude_partition ? record.delete('partition'.freeze) : record['partition'.freeze]) || @default_partition
        message_key = (@exclude_message_key ? record.delete('message_key'.freeze) : record['message_key'.freeze]) || @default_message_key

        record_buf = @formatter_proc.call(tag, time, record)
        record_buf_bytes = record_buf.bytesize
        if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
          log.warn "record size exceeds max_send_limit_bytes. Skip event:", :time => time, :record_size => record_buf_bytes
          log.debug "Skipped event:", :record => record
          next
        end
      rescue StandardError => e
        log.warn "unexpected error during format record. Skip broken event:", :error => e.to_s, :error_class => e.class.to_s, :time => time, :record => record
        next
      end

      producer = get_producer
      handler = enqueue_with_retry(producer, topic, record_buf, message_key, partition, time)
      handler
    }.each { |handler|
      handler.wait(max_wait_timeout: @rdkafka_delivery_handle_poll_timeout) if @rdkafka_delivery_handle_poll_timeout != 0
    }
  end
rescue Exception => e
  log.warn "Send exception occurred: #{e} at #{e.backtrace.first}"
  # Raise exception to retry sending messages
  raise e
end
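Each event can carry its own routing fields: 'topic', 'partition', and 'message_key' override the configured defaults, and when the corresponding exclude_* options are set those fields are deleted from the record before it is formatted. A sketch with a hypothetical record and the exclude_* options enabled:

# Hypothetical event; 'default-topic' stands in for def_topic, nil for the
# @default_partition / @default_message_key fallbacks.
record = { 'topic' => 'audit', 'partition' => 2, 'message_key' => 'user-42', 'msg' => 'login' }

topic       = record.delete('topic')       || 'default-topic'
partition   = record.delete('partition')   || nil
message_key = record.delete('message_key') || nil

p [topic, partition, message_key]  # => ["audit", 2, "user-42"]
p record                           # => {"msg"=>"login"}  (payload actually formatted and sent)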