Class: Fluent::Rdkafka2Output
- Inherits: Output (ancestry: Object > Output > Fluent::Rdkafka2Output)
- Defined in: lib/fluent/plugin/out_rdkafka2.rb
Constant Summary
Constants included from KafkaPluginUtil::SSLSettings
KafkaPluginUtil::SSLSettings::DummyFormatter
Instance Method Summary
- #build_config ⇒ Object
- #configure(conf) ⇒ Object
- #enqueue_with_retry(producer, topic, record_buf, message_key, partition) ⇒ Object
- #get_producer ⇒ Object
- #initialize ⇒ Rdkafka2Output (constructor): A new instance of Rdkafka2Output.
- #multi_workers_ready? ⇒ Boolean
- #setup_formatter(conf) ⇒ Object
- #shutdown ⇒ Object
- #shutdown_producers ⇒ Object
- #start ⇒ Object
- #write(chunk) ⇒ Object
Methods included from KafkaPluginUtil::SaslSettings
Methods included from KafkaPluginUtil::SSLSettings
included, #pickup_ssl_endpoint, #read_ssl_file
Constructor Details
#initialize ⇒ Rdkafka2Output
Returns a new instance of Rdkafka2Output.
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 94

def initialize
  super

  @producers = {}
  @producers_mutex = Mutex.new
end
Instance Method Details
#build_config ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 134

def build_config
  config = {:"bootstrap.servers" => @brokers}

  if @ssl_ca_cert && @ssl_ca_cert[0]
    ssl = true
    config[:"ssl.ca.location"] = @ssl_ca_cert[0]
    config[:"ssl.certificate.location"] = @ssl_client_cert if @ssl_client_cert
    config[:"ssl.key.location"] = @ssl_client_cert_key if @ssl_client_cert_key
    config[:"ssl.key.password"] = @ssl_client_cert_key_password if @ssl_client_cert_key_password
  end

  if @principal
    sasl = true
    config[:"sasl.mechanisms"] = "GSSAPI"
    config[:"sasl.kerberos.principal"] = @principal
    config[:"sasl.kerberos.service.name"] = @service_name if @service_name
    config[:"sasl.kerberos.keytab"] = @keytab if @keytab
  end

  if ssl && sasl
    security_protocol = "SASL_SSL"
  elsif ssl && !sasl
    security_protocol = "SSL"
  elsif !ssl && sasl
    security_protocol = "SASL_PLAINTEXT"
  else
    security_protocol = "PLAINTEXT"
  end
  config[:"security.protocol"] = security_protocol

  config[:"compression.codec"] = @compression_codec if @compression_codec
  config[:"message.send.max.retries"] = @max_send_retries if @max_send_retries
  config[:"request.required.acks"] = @required_acks if @required_acks
  config[:"request.timeout.ms"] = @ack_timeout * 1000 if @ack_timeout
  config[:"queue.buffering.max.ms"] = @rdkafka_buffering_max_ms if @rdkafka_buffering_max_ms
  config[:"queue.buffering.max.messages"] = @rdkafka_buffering_max_messages if @rdkafka_buffering_max_messages
  config[:"message.max.bytes"] = @rdkafka_message_max_bytes if @rdkafka_message_max_bytes
  config[:"batch.num.messages"] = @rdkafka_message_max_num if @rdkafka_message_max_num

  @rdkafka_options.each { |k, v|
    config[k.to_sym] = v
  }

  config
end
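As a purely illustrative example (values invented, not taken from the plugin), a setup with two brokers, a CA certificate, and gzip compression would yield a hash along these lines:

# Hypothetical result of build_config for an SSL-only deployment;
# all addresses and paths below are made up for illustration.
{
  :"bootstrap.servers" => "kafka1:9092,kafka2:9092",
  :"ssl.ca.location"   => "/etc/ssl/certs/kafka-ca.crt",
  :"security.protocol" => "SSL",
  :"compression.codec" => "gzip"
}

Keys passed through rdkafka_options are merged last, so they can override any of the derived settings.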
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 100

def configure(conf)
  super
  log.instance_eval {
    def add(level, &block)
      if block
        self.info(block.call)
      end
    end
  }
  Rdkafka::Config.logger = log
  config = build_config
  @rdkafka = Rdkafka::Config.new(config)

  if @default_topic.nil?
    if @chunk_keys.include?(@topic_key) && !@chunk_key_tag
      log.warn "Use '#{@topic_key}' field of event record for topic but no fallback. Recommend to set default_topic or set 'tag' in buffer chunk keys like <buffer #{@topic_key},tag>"
    end
  else
    if @chunk_key_tag
      log.warn "default_topic is set. Fluentd's event tag is not used for topic"
    end
  end

  formatter_conf = conf.elements('format').first
  unless formatter_conf
    raise Fluent::ConfigError, "<format> section is required."
  end
  unless formatter_conf["@type"]
    raise Fluent::ConfigError, "format/@type is required."
  end
  @formatter_proc = setup_formatter(formatter_conf)
  @topic_key_sym = @topic_key.to_sym
end
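The instance_eval block rewrites the logger's add method so that rdkafka's log lines are forwarded to Fluentd's logger at info level. For orientation, the handoff to rdkafka-ruby can be sketched standalone; Rdkafka::Config.new and Config#producer are real rdkafka-ruby APIs, while the broker address below is invented:

require 'rdkafka'

# Sketch of what configure does via build_config: assemble a config hash
# and wrap it in an Rdkafka::Config. The broker address is an assumption.
config = { :"bootstrap.servers" => "localhost:9092", :"security.protocol" => "PLAINTEXT" }
rdkafka = Rdkafka::Config.new(config)
producer = rdkafka.producer  # in this plugin, created lazily per thread by get_producer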
#enqueue_with_retry(producer, topic, record_buf, message_key, partition) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 272

def enqueue_with_retry(producer, topic, record_buf, message_key, partition)
  attempt = 0
  loop do
    begin
      return producer.produce(topic: topic, payload: record_buf, key: message_key, partition: partition)
    rescue Exception => e
      if e.code == :queue_full
        if attempt <= @max_enqueue_retries
          log.warn "Failed to enqueue message; attempting retry #{attempt} of #{@max_enqueue_retries} after #{@enqueue_retry_backoff}s"
          sleep @enqueue_retry_backoff
          attempt += 1
        else
          raise "Failed to enqueue message although tried retry #{@max_enqueue_retries} times"
        end
      else
        raise e
      end
    end
  end
end
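:queue_full is the symbol rdkafka-ruby's Rdkafka::RdkafkaError exposes when librdkafka's client-side send queue is saturated (RD_KAFKA_RESP_ERR__QUEUE_FULL). A minimal standalone sketch of the same back-off pattern; the topic name and one-second sleep are assumptions:

begin
  handle = producer.produce(topic: "events", payload: "hello")
rescue Rdkafka::RdkafkaError => e
  if e.code == :queue_full
    sleep 1  # let librdkafka's background poller drain the local queue
    retry    # re-run the begin block
  else
    raise
  end
end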
#get_producer ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 209

def get_producer
  @producers_mutex.synchronize {
    producer = @producers[Thread.current.object_id]
    unless producer
      producer = @rdkafka.producer
      @producers[Thread.current.object_id] = producer
    end
    producer
  }
end
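The cache is keyed by Thread.current.object_id, so each flush thread gets its own producer and no handle is ever shared across threads. The body is equivalent to this compact sketch:

@producers_mutex.synchronize {
  @producers[Thread.current.object_id] ||= @rdkafka.producer
}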
#multi_workers_ready? ⇒ Boolean
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 184

def multi_workers_ready?
  true
end
#setup_formatter(conf) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 220

def setup_formatter(conf)
  type = conf['@type']
  case type
  when 'ltsv'
    require 'ltsv'
    Proc.new { |tag, time, record| LTSV.dump(record) }
  else
    @formatter = formatter_create(usage: 'rdkafka-plugin', conf: conf)
    @formatter.method(:format)
  end
end
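In the 'ltsv' branch, LTSV.dump serializes a record as tab-separated key:value pairs; every other @type is resolved through Fluentd's formatter_create helper. A small illustration of the LTSV output (record contents invented):

require 'ltsv'

LTSV.dump(host: "web01", status: 200)
# => "host:web01\tstatus:200"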
#shutdown ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 188

def shutdown
  super
  shutdown_producers
end
#shutdown_producers ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 193

def shutdown_producers
  @producers_mutex.synchronize {
    shutdown_threads = @producers.map { |key, producer|
      th = Thread.new {
        unless producer.close(10)
          log.warn("Queue is forcefully closed after 10 seconds wait")
        end
      }
      th.abort_on_exception = true
      th
    }
    shutdown_threads.each { |th| th.join }
    @producers = {}
  }
end
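Each producer is closed on its own thread so that one slow producer cannot serialize the whole shutdown, and the threads are joined before the cache is cleared. Note the call shape producer.close(10) returning a boolean: plain rdkafka-ruby's Producer#close takes no timeout, so this appears to rely on a patched close defined elsewhere in the plugin (an inference from the call site, not confirmed by this page).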
#start ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 180

def start
  super
end
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_rdkafka2.rb', line 232

def write(chunk)
  tag = chunk.metadata.tag
  topic = chunk.metadata.variables[@topic_key_sym] || @default_topic || tag

  handlers = []
  record_buf = nil
  record_buf_bytes = nil

  begin
    producer = get_producer
    chunk.msgpack_each { |time, record|
      begin
        record = inject_values_to_record(tag, time, record)
        record.delete(@topic_key) if @exclude_topic_key
        partition = (@exclude_partition ? record.delete(@partition_key) : record[@partition_key]) || @default_partition
        message_key = (@exclude_message_key ? record.delete(@message_key_key) : record[@message_key_key]) || @default_message_key

        record_buf = @formatter_proc.call(tag, time, record)
        record_buf_bytes = record_buf.bytesize
        if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
          log.warn "record size exceeds max_send_limit_bytes. Skip event:", :time => time, :record => record
          next
        end
      rescue StandardError => e
        log.warn "unexpected error during format record. Skip broken event:", :error => e.to_s, :error_class => e.class.to_s, :time => time, :record => record
        next
      end

      handlers << enqueue_with_retry(producer, topic, record_buf, message_key, partition)
    }
    handlers.each { |handler|
      handler.wait(@rdkafka_delivery_handle_poll_timeout) if @rdkafka_delivery_handle_poll_timeout != 0
    }
  end
rescue Exception => e
  log.warn "Send exception occurred: #{e} at #{e.backtrace.first}"
  # Raise exception to retry sending messages
  raise e
end
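Each produce call returns a delivery handle; collecting the handles and waiting on them after the loop lets the whole chunk be enqueued asynchronously while still surfacing any unacknowledged message as an error, which makes Fluentd retry the chunk. A standalone sketch of the pattern; the topic, payloads, and 30-second timeout are assumptions:

handles = 3.times.map { |i|
  producer.produce(topic: "events", payload: "message #{i}")
}
# wait raises if the broker does not acknowledge within the timeout
handles.each { |h| h.wait(30) }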