Class: FluentPluginKinesis::OutputFilter
- Inherits:
-
Fluent::BufferedOutput
- Object
- Fluent::BufferedOutput
- FluentPluginKinesis::OutputFilter
- Includes:
- Fluent::DetachMultiProcessMixin, Fluent::SetTagKeyMixin, Fluent::SetTimeKeyMixin
- Defined in:
- lib/fluent/plugin/out_kinesis.rb
Constant Summary collapse
- USER_AGENT_NAME =
'fluent-plugin-kinesis-output-filter'
- PROC_BASE_STR =
'proc {|record| %s }'
- PUT_RECORDS_MAX_COUNT =
500
- PUT_RECORD_MAX_DATA_SIZE =
1024 * 1024
- PUT_RECORDS_MAX_DATA_SIZE =
1024 * 1024 * 5
Instance Method Summary collapse
- #configure(conf) ⇒ Object
- #format(tag, time, record) ⇒ Object
- #start ⇒ Object
- #write(chunk) ⇒ Object
Instance Method Details
#configure(conf) ⇒ Object
64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 |
# File 'lib/fluent/plugin/out_kinesis.rb', line 64

# Validates plugin parameters and derives the runtime operating mode.
#
# Parallel mode is enabled when the user requested detached worker
# processes and/or more than one writer thread; ordered delivery is
# incompatible with parallel mode and is forcibly disabled there.
#
# @param conf [Fluent::Config::Element] parsed plugin configuration
# @return [void]
def configure(conf)
  super
  validate_params

  if @detach_process || (@num_threads > 1)
    @parallel_mode = true
    if @detach_process
      @use_detach_multi_process_mixin = true
    end
  else
    @parallel_mode = false
  end

  if @parallel_mode
    # Cross-process/thread delivery order cannot be guaranteed, so warn
    # if the user asked for ordering, then disable it.
    if @order_events
      log.warn 'You have set "order_events" to true, however this configuration will be ignored due to "detach_process" and/or "num_threads".'
    end
    @order_events = false
  end

  # SECURITY NOTE(review): the *_expr options are interpolated into
  # PROC_BASE_STR and eval'd verbatim — a hostile config value executes
  # arbitrary Ruby. Config files must be trusted.
  if @partition_key_expr
    partition_key_proc_str = sprintf(
      PROC_BASE_STR, @partition_key_expr
    )
    @partition_key_proc = eval(partition_key_proc_str)
  end

  if @explicit_hash_key_expr
    explicit_hash_key_proc_str = sprintf(
      PROC_BASE_STR, @explicit_hash_key_expr
    )
    @explicit_hash_key_proc = eval(explicit_hash_key_proc_str)
  end

  # Serializer selection: Yajl when configured, stdlib JSON otherwise.
  @dump_class = @use_yajl ? Yajl : JSON
end
#format(tag, time, record) ⇒ Object
109 110 111 112 113 114 115 116 117 118 119 120 |
# File 'lib/fluent/plugin/out_kinesis.rb', line 109

# Serializes one event into the msgpack hash consumed later by #write.
#
# @param tag [String] event tag (unused in the emitted payload)
# @param time [Integer] event timestamp (unused in the emitted payload)
# @param record [Hash] the event record to ship to Kinesis
# @return [String] msgpack-encoded hash with :data, :partition_key and,
#   when configured, :explicit_hash_key
def format(tag, time, record)
  data = {
    data: @dump_class.dump(record),
    partition_key: get_key(:partition_key, record)
  }

  # Include an explicit hash key only when one is configured, either as a
  # static value or as an eval'd proc; otherwise the field is omitted.
  if @explicit_hash_key || @explicit_hash_key_proc
    data[:explicit_hash_key] = get_key(:explicit_hash_key, record)
  end

  data.to_msgpack
end
#start ⇒ Object
101 102 103 104 105 106 107 |
# File 'lib/fluent/plugin/out_kinesis.rb', line 101 def start detach_multi_process do super load_client check_connection_to_stream end end |
#write(chunk) ⇒ Object
122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 |
# File 'lib/fluent/plugin/out_kinesis.rb', line 122

# Flushes one buffer chunk to Kinesis.
#
# Oversized records are logged and dropped (Kinesis rejects them anyway);
# the remainder are transformed into put payloads and delivered either
# one-by-one (ordered mode) or in batched PutRecords calls with retry.
#
# @param chunk [Fluent::BufferChunk] msgpack-encoded records from #format
# @return [void]
def write(chunk)
  data_list = chunk.to_enum(:msgpack_each).select { |record|
    # Positive-form check (was `unless ... else`): keep records within the
    # per-record size limit, log and drop the rest.
    if record_exceeds_max_size?(record['data'])
      log.error sprintf('Record exceeds the %.3f KB(s) per-record size limit and will not be delivered: %s', PUT_RECORD_MAX_DATA_SIZE / 1024.0, record['data'])
      false
    else
      true
    end
  }.map { |record| build_data_to_put(record) }

  if @order_events
    put_record_for_order_events(data_list)
  else
    # Batch into PutRecords-sized groups and send each with retry.
    build_records_array_to_put(data_list).each do |records|
      put_records_with_retry(records)
    end
  end
end