Class: RelationalExporter::Runner

Inherits:
Object
Defined in:
lib/relational_exporter.rb

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(options = {}) ⇒ Runner

Returns a new instance of Runner.



# File 'lib/relational_exporter.rb', line 14

def initialize(options={})
  @logger = options[:logger] || Logger.new(STDERR)

  # Establish the ActiveRecord connection up front so a bad config fails fast.
  @connection_config = options[:connection_config]
  begin
    ActiveRecord::Base.establish_connection @connection_config
    ActiveRecord::Base.connection.active?
  rescue StandardError => e
    raise "Database connection failed: #{e.message}"
  end

  # The schema can be passed inline via :schema or loaded from a YAML file via :schema_file.
  @schema = Hashie::Mash.new(options[:schema] || YAML.load_file(options[:schema_file]))

  load_models
end
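
A minimal usage sketch for the constructor. The option keys (:logger, :connection_config, :schema, :schema_file) come from the code above; the connection hash values and the schema file path are illustrative assumptions.

require 'relational_exporter'
require 'logger'

# Illustrative values only: the connection hash is whatever
# ActiveRecord::Base.establish_connection accepts, and the schema path is assumed.
runner = RelationalExporter::Runner.new(
  connection_config: {
    adapter:  'postgresql',
    host:     'localhost',
    database: 'app_development',
    username: 'postgres'
  },
  schema_file: 'config/export_schema.yml', # or pass :schema with an already-parsed hash
  logger: Logger.new(STDOUT)               # optional; defaults to Logger.new(STDERR)
)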

Instance Attribute Details

#logger ⇒ Object

Returns the value of attribute logger.



# File 'lib/relational_exporter.rb', line 12

def logger
  @logger
end

#schema ⇒ Object

Returns the value of attribute schema.



# File 'lib/relational_exporter.rb', line 12

def schema
  @schema
end

Instance Method Details

#export(options, &block) ⇒ Object



# File 'lib/relational_exporter.rb', line 30

def export(options, &block)
  ActiveRecord::Base.logger = @logger
  Celluloid.logger = @logger

  options = Hashie::Mash.new options

  # Resolve the primary model and apply the configured scope.
  main_klass = options.output.model.to_s.classify.constantize

  main_klass.set_scope_from_hash options.output.scope.as_json

  total_records = main_klass.find_all_by_scope(options.output.scope.as_json).count
  remaining_records = total_records

  # The CSV builder runs as a Celluloid actor; rows are written as workers produce them.
  csv_builder = RelationalExporter::CsvBuilder.new options.file_path
  Celluloid::Actor[:csv_builder] = csv_builder
  result = csv_builder.future.start
  pool_size = options.workers || 10
  pool = RelationalExporter::RecordWorker.pool(size: pool_size)
  get_headers = true

  record_sequence = -1
  batch_count = 0

  batch_options = Hashie::Mash.new({batch_size: 100}.merge(options.batch_options || {}))
  limit = options.limit.nil? ? nil : options.limit.to_i
  max_records = limit.nil? ? total_records : [limit, total_records].min

  @logger.info "CSV export will process #{max_records} of #{total_records} total records."

  all_bm = Benchmark.measure do
    catch(:hit_limit) do
      main_klass.find_all_by_scope(options.output.scope.as_json).find_in_batches(batch_options.to_h.symbolize_keys) do |records|
        batch_count += 1
        batch_bm = Benchmark.measure do
          records.each do |record|
            record_sequence += 1
            remaining_records -= 1

            args = [record_sequence, record, options.output.associations, get_headers]
            # The first row is fetched synchronously so headers are written before any data rows.
            if get_headers
              pool.get_csv_row(*args)
              get_headers = false
            else
              pool.async.get_csv_row(*args)
            end

            # Bail out of the batch loop once the configured limit is reached.
            throw :hit_limit if !limit.nil? && (record_sequence == max_records)
          end
        end

        @logger.debug "Batch of #{records.size} queued. #{remaining_records} remaining. Benchmark: #{batch_bm}"
      end
    end

    csv_builder.end_index = record_sequence

    @logger.info "CSV export complete <#{options.file_path}>" if result.value === true
  end

  @logger.debug "#{batch_count} batches processed. Benchmark: #{all_bm}"

  pool.terminate
  csv_builder.terminate
end
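
A usage sketch for #export, continuing the runner from the constructor example. The option keys mirror those read in the method above; the shapes of :scope and :associations depend on the schema and on the set_scope_from_hash / find_all_by_scope extensions, so the values shown are assumptions.

# All values below are illustrative: :model is classified and constantized,
# :scope is forwarded to set_scope_from_hash / find_all_by_scope, and
# :associations is passed to each RecordWorker.
runner.export(
  file_path: 'tmp/users_export.csv',
  workers: 4,                         # worker pool size; defaults to 10
  limit: 1_000,                       # optional cap on exported records
  batch_options: { batch_size: 500 }, # merged over the default batch_size of 100
  output: {
    model: :user,
    scope: {},
    associations: []
  }
)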