Class: LogStash::Outputs::MongodbCustom

Inherits:
Base
  • Object
show all
Defined in:
lib/logstash/outputs/mongodb_custom.rb

Overview

This output writes events to MongoDB.

Constant Summary collapse

@@mutex =

Mutex used to synchronize access to the `@documents` bulk buffer

Mutex.new

Instance Method Summary collapse

Instance Method Details

#closeObject



140
141
142
143
144
# File 'lib/logstash/outputs/mongodb_custom.rb', line 140

# Shuts down the output: flags the bulk flusher to stop, interrupts its
# sleep so it notices immediately, and blocks until the thread has exited.
def close
  @closed.make_true
  @bulk_thread.tap(&:wakeup).join
end

#receive(event) ⇒ Object



79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
# File 'lib/logstash/outputs/mongodb_custom.rb', line 79

# Receives a single Logstash event and writes it to MongoDB — either
# immediately via insert_one, or buffered under @documents (guarded by
# @@mutex) for the bulk path, flushing when @bulk_size is reached.
#
# @param event [LogStash::Event] the event to persist
def receive(event)
  begin
    # {}.merge(other) so we don't taint the event hash innards
    document = {}.merge(event.to_hash)

    if !@isodate
      timestamp = event.timestamp
      if timestamp
        # Deliberately serialized with to_json (an ISO8601 string), not
        # to_bson, so the field is stored as a string rather than a date.
        document["@timestamp"] = timestamp.to_json
      else
        @logger.warn("Cannot set MongoDB document `@timestamp` field because it does not exist in the event", :event => event)
      end
    end

    if @date_keys
      # The configured key list never changes at runtime; split it once
      # instead of on every event.
      @date_key_list ||= @date_keys.to_s.split(",")
      document.each do |key, value|
        if @date_key_list.include?(key)
          document[key] = LogStash::Timestamp.new(value)
        end
      end
    end

    document["_id"] = BSON::ObjectId.new if @generateId

    if @bulk
      collection = event.sprintf(@collection)
      @@mutex.synchronize do
        (@documents[collection] ||= []).push(document)

        if @documents[collection].length >= @bulk_size
          @db[collection].insert_many(@documents[collection])
          @documents.delete(collection)
        end
      end
    else
      @db[event.sprintf(@collection)].insert_one(document)
    end
  rescue => e
    if e.message =~ /^E11000/
      # On a duplicate key error, skip the insert.
      # We could check if the duplicate key err is the _id key
      # and generate a new primary key.
      # If the duplicate key error is on another field, we have no way
      # to fix the issue.
      @logger.warn("Skipping insert because of a duplicate key error", :event => event, :exception => e)
    else
      # NOTE(review): `retry` restarts the whole method, so in bulk mode a
      # failed insert_many leaves the buffer intact and re-pushes this
      # event's document — duplicates are possible. Retrying is unbounded
      # by design (at-least-once delivery).
      @logger.warn("Failed to send event to MongoDB, retrying in #{@retry_delay} seconds", :event => event, :exception => e)
      sleep(@retry_delay)
      retry
    end
  end
end

#registerObject



51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# File 'lib/logstash/outputs/mongodb_custom.rb', line 51

# Validates the bulk configuration, connects to MongoDB, and starts the
# background thread that flushes the bulk buffer every @bulk_interval
# seconds until #close sets @closed.
#
# @raise [LogStash::ConfigurationError] if bulk_size exceeds 1000
def register
  if @bulk_size > 1000
    # The condition admits exactly 1000, so the message says "no greater
    # than" (the previous "lower than" contradicted the check).
    raise LogStash::ConfigurationError, "Bulk size must be no greater than '1000', currently '#{@bulk_size}'"
  end

  Mongo::Logger.logger = @logger
  conn = Mongo::Client.new(@uri)
  @db = conn.use(@database)

  @closed = Concurrent::AtomicBoolean.new(false)
  @documents = {}

  @bulk_thread = Thread.new(@bulk_interval) do |bulk_interval|
    while @closed.false? do
      sleep(bulk_interval)

      @@mutex.synchronize do
        # Iterate over a snapshot of the keys: deleting flushed
        # collections must not mutate the hash mid-iteration.
        @documents.keys.each do |collection|
          values = @documents[collection]
          next if values.empty?
          begin
            @db[collection].insert_many(values)
            @documents.delete(collection)
          rescue => e
            # Keep the flusher thread alive on transient failures; the
            # documents stay buffered and are retried next interval.
            # (Previously any exception here silently killed the thread.)
            @logger.warn("Bulk insert into MongoDB failed, will retry on next flush", :collection => collection, :exception => e)
          end
        end
      end
    end
  end
end