Module: Resque::Plugins::BatchedLogging

Defined in:
lib/resque/plugins/batched_logging.rb

Defined Under Namespace

Classes: BatchExists, BatchLoggerProxy

Class Method Summary

Class Method Details

.included(base) ⇒ Object



# File 'lib/resque/plugins/batched_logging.rb', line 9

def self.included(base)
  base.class_eval do
    class << self

      # MyJob.batched do
      #   enqueue(1, 2, 3, {:my => :options})
      # end
      def batched(&block)
        batch_name = self.to_s # Group the batch of jobs by the job's class name
        raise Resque::Plugins::BatchedLogging::BatchExists.new("Batch for '#{batch_name}' exists already") if Resque.redis.get("#{batch_name}:jobcount")

        job_count = 0
        Resque.redis.set("#{batch_name}:jobcount", job_count) # Set the job count right away, because the workers will check for it's existence
        if block_given?
          proxy_obj = Resque::Plugins::BatchedLogging::BatchLoggerProxy.new(self, batch_name)
          proxy_obj.run(&block)
          job_count = proxy_obj.job_count
        else
          raise "Must pass a block through to a batched group of jobs"
        end
        Resque.redis.set("#{batch_name}:jobcount", job_count)
        Resque.enqueue(Resque::Plugins::BatchedLogger, batch_name) # Queue a job to process the log information that is stored in redis
      end

      # Plugin.around_hook for wrapping the job in logging code
      def around_perform_log_as_batched(*args)
        batch_name = self.to_s
        # Presence of the jobcount variable means that batched logging is enabled for this job type
        if Resque.redis.get("#{batch_name}:jobcount")
          # Perform our logging
          start_time = Time.now
          run_time = Benchmark.realtime do
            yield
          end
          end_time = Time.now
          # Store [run_time, start_time, end_time] as an entry in a redis list for this specific job type & queue
          # rpush appends to the end of a list in redis
          Resque.redis.rpush("batch_stats:#{batch_name}", [run_time, start_time, end_time].to_json) # Push the values onto the end of the list (will create the list if it doesn't exist)
          # End of our logging
        else
          yield # Just perform the standard job without benchmarking
        end
      end

    end
  end
end
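
Usage example

The following is a minimal sketch, assuming a hypothetical MyJob Resque job (its name, queue, and perform arguments are illustrative) and a redis instance reachable through Resque.redis. It shows the call pattern documented in the inline comment above: include the module, then enqueue jobs inside a batched block so they are grouped and timed under the "MyJob" batch name.

require 'resque'
require 'resque/plugins/batched_logging'

class MyJob
  include Resque::Plugins::BatchedLogging # defines .batched and the around_perform hook via self.included
  @queue = :default # hypothetical queue name

  def self.perform(user_id)
    # ... hypothetical work ...
  end
end

# Enqueue a batch of jobs grouped under the "MyJob" batch name.
# Raises Resque::Plugins::BatchedLogging::BatchExists if a "MyJob" batch
# is already in flight (i.e. "MyJob:jobcount" already exists in redis).
MyJob.batched do
  enqueue(1)
  enqueue(2)
  enqueue(3)
end

Each enqueue call inside the block is routed through BatchLoggerProxy so the plugin can count the jobs; once the block finishes, a Resque::Plugins::BatchedLogger job is queued to process the timing entries that the around_perform_log_as_batched hook pushes onto the "batch_stats:MyJob" list.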