Class: Fluent::ElasticsearchOutput
- Inherits: BufferedOutput (Object → BufferedOutput → Fluent::ElasticsearchOutput)
- Includes: SetTagKeyMixin
- Defined in: lib/fluent/plugin/out_elasticsearch.rb
Defined Under Namespace
Classes: ConnectionFailure
Instance Method Summary
- #client ⇒ Object
- #configure(conf) ⇒ Object
- #connection_options_description ⇒ Object
- #format(tag, time, record) ⇒ Object
- #get_connection_options ⇒ Object
- #send(data) ⇒ Object
- #shutdown ⇒ Object
- #start ⇒ Object
- #write(chunk) ⇒ Object
Constructor Details
#initialize ⇒ ElasticsearchOutput
Returns a new instance of ElasticsearchOutput.
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 38
def initialize
super
end
Instance Method Details
#client ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 50
def client
@_es ||= begin
adapter_conf = lambda {|f| f.adapter :excon }
transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(get_connection_options.merge(
options: {
reload_connections: @reload_connections,
reload_on_failure: @reload_on_failure,
retry_on_failure: 5,
transport_options: {
request: { timeout: @request_timeout },
ssl: { verify: @ssl_verify }
}
}), &adapter_conf)
es = Elasticsearch::Client.new transport: transport
begin
raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})!" unless es.ping
rescue *es.transport.host_unreachable_exceptions => e
raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description})! #{e.message}"
end
log.info "Connection opened to Elasticsearch cluster => #{connection_options_description}"
es
end
end
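The client is memoized in @_es, so repeated calls reuse one connection; #send below sets @_es to nil to force a reconnect after a delivery failure. The same ping-based reachability check can be reproduced outside the plugin; in this sketch the host and timeout values are illustrative assumptions, not plugin defaults.

require 'elasticsearch'

# Illustrative standalone probe mirroring the plugin's ping check.
es = Elasticsearch::Client.new(
  host: 'localhost:9200',
  transport_options: { request: { timeout: 5 } }
)
raise "Can not reach Elasticsearch cluster!" unless es.ping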
#configure(conf) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 42
def configure(conf)
super
end
#connection_options_description ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 109
def connection_options_description
get_connection_options[:hosts].map do |host_info|
attributes = host_info.dup
attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
attributes.inspect
end.join(', ')
end
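Each host hash is duplicated and its password, if present, is replaced with the literal string 'obfuscated' before being inspected, so credentials never reach the log line. For a hypothetical host entry:

host = { host: 'es1.example.com', port: 9200, user: 'fluent', password: 'secret' }
attrs = host.dup
attrs[:password] = 'obfuscated' if attrs.key?(:password)
attrs.inspect
# => "{:host=>\"es1.example.com\", :port=>9200, :user=>\"fluent\", :password=>\"obfuscated\"}"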
#format(tag, time, record) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 117
def format(tag, time, record)
[tag, time, record].to_msgpack
end
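Each event is buffered as a MessagePack-encoded [tag, time, record] triple, which #write later decodes with chunk.msgpack_each. A quick round trip with sample values shows the shape:

require 'msgpack'

packed = ['app.logs', 1_400_000_000, { 'message' => 'hello' }].to_msgpack
MessagePack.unpack(packed)
# => ["app.logs", 1400000000, {"message"=>"hello"}]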
#get_connection_options ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 76
def get_connection_options
raise "`password` must be present if `user` is present" if @user && !@password
hosts = if @hosts
@hosts.split(',').map do |host_str|
if host_str.match(%r{^[^:]+(\:\d+)?$})
{
host: host_str.split(':')[0],
port: (host_str.split(':')[1] || @port).to_i,
scheme: @scheme
}
else
uri = URI(host_str)
%w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
hash
end
end
end.compact
else
[{host: @host, port: @port, scheme: @scheme}]
end.each do |host|
host.merge!(user: @user, password: @password) if !host[:user] && @user
host.merge!(path: @path) if !host[:path] && @path
end
{
hosts: hosts
}
end
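Plain host:port entries go through the regexp branch and pick up the configured scheme and port, while full URLs are parsed with URI and may carry their own user, password, and path; any globally configured user, password, or path is then merged into hosts that lack one. Assuming a hypothetical hosts value of "es1.example.com:9200,https://user:pw@es2.example.com:9443/sub" and the default http scheme, the result is roughly:

{ hosts: [
  { host: 'es1.example.com', port: 9200, scheme: 'http' },
  { host: 'es2.example.com', port: 9443, scheme: 'https',
    user: 'user', password: 'pw', path: '/sub' }
] }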
#send(data) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 173
def send(data)
retries = 0
begin
client.bulk body: data
rescue *client.transport.host_unreachable_exceptions => e
if retries < 2
retries += 1
@_es = nil
log.warn "Could not push logs to Elasticsearch, resetting connection and trying again. #{e.message}"
sleep 2**retries
retry
end
raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
end
end
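Delivery is retried at most twice, discarding the cached client and pausing 2**retries seconds (2s, then 4s) between attempts, before a ConnectionFailure is raised. A minimal sketch of the same bounded backoff, using only plain Ruby and not part of the plugin:

def with_backoff(max_retries: 2)
  retries = 0
  begin
    yield
  rescue StandardError
    raise unless retries < max_retries
    retries += 1
    sleep 2**retries  # 2s after the first failure, 4s after the second
    retry
  end
end

# Usage sketch: with_backoff { client.bulk body: data }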
#shutdown ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 121
def shutdown
super
end
#start ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 46
def start
super
end
#write(chunk) ⇒ Object
# File 'lib/fluent/plugin/out_elasticsearch.rb', line 125
def write(chunk)
bulk_message = []
chunk.msgpack_each do |tag, time, record|
next unless record.is_a? Hash
if @logstash_format
if record.has_key?("@timestamp")
time = Time.parse record["@timestamp"]
elsif record.has_key?(@time_key)
time = Time.parse record[@time_key]
record['@timestamp'] = record[@time_key]
else
record.merge!({"@timestamp" => Time.at(time).to_datetime.to_s})
end
if @utc_index
target_index = "#{@logstash_prefix}-#{Time.at(time).getutc.strftime("#{@logstash_dateformat}")}"
else
target_index = "#{@logstash_prefix}-#{Time.at(time).strftime("#{@logstash_dateformat}")}"
end
else
target_index = @index_name
end
if @include_tag_key
record.merge!(@tag_key => tag)
end
meta = { "index" => {"_index" => target_index, "_type" => type_name} }
if @id_key && record[@id_key]
meta['index']['_id'] = record[@id_key]
end
if @parent_key && record[@parent_key]
meta['index']['_parent'] = record[@parent_key]
end
if ObjectSpace.memsize_of(record) < @max_size
bulk_message << meta
bulk_message << record
else
log.info "Could not send log to Elasticsearch: the size of log exceeded max_size"
end
end
send(bulk_message) unless bulk_message.empty?
bulk_message.clear
end
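The resulting bulk_message alternates action metadata and document source, which is the layout the Elasticsearch Bulk API expects. For a single logstash-formatted record it looks roughly like the following (index name, type, and fields are illustrative, assuming a type_name of "fluentd"):

[
  { "index" => { "_index" => "logstash-2014.07.01", "_type" => "fluentd" } },
  { "message" => "hello", "@timestamp" => "2014-07-01T12:00:00+00:00" }
]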