Class: RelationalExporter::Runner
- Inherits:
-
Object
- Object
- RelationalExporter::Runner
- Defined in:
- lib/relational_exporter.rb
Instance Attribute Summary collapse
-
#logger ⇒ Object
Returns the value of attribute logger.
-
#schema ⇒ Object
Returns the value of attribute schema.
Instance Method Summary collapse
- #export(options, &block) ⇒ Object
-
#initialize(options = {}) ⇒ Runner
constructor
A new instance of Runner.
Constructor Details
#initialize(options = {}) ⇒ Runner
Returns a new instance of Runner.
14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 |
# File 'lib/relational_exporter.rb', line 14
#
# Builds a new Runner: wires up logging, establishes the ActiveRecord
# connection, loads the export schema, and defines the model classes.
#
# NOTE(review): this block was recovered from a generated doc page in which
# a local identifier had been stripped everywhere; `options` and `e.message`
# are reconstructed — verify against the gem source.
#
# @param options [Hash] supported keys:
#   :logger            - Logger instance (defaults to Logger.new(STDERR))
#   :connection_config - passed to ActiveRecord::Base.establish_connection
#   :schema            - schema Hash, OR
#   :schema_file       - path to a YAML schema file (used when :schema absent)
# @raise [RuntimeError] if the database connection cannot be established
def initialize(options = {})
  @logger = options[:logger] || Logger.new(STDERR)
  @connection_config = options[:connection_config]

  begin
    ActiveRecord::Base.establish_connection @connection_config
    ActiveRecord::Base.connection.active?
  rescue StandardError => e
    # Was `rescue Exception` originally — narrowed to StandardError so that
    # signals, SystemExit and NoMemoryError are not swallowed here.
    raise "Database connection failed: #{e.message}"
  end

  @schema = Hashie::Mash.new(options[:schema] || YAML.load_file(options[:schema_file]))

  load_models
end
Instance Attribute Details
#logger ⇒ Object
Returns the value of attribute logger.
12 13 14 |
# File 'lib/relational_exporter.rb', line 12

# Reader for the logger attribute.
#
# @return [Object] the currently configured logger
def logger
  @logger
end
#schema ⇒ Object
Returns the value of attribute schema.
12 13 14 |
# File 'lib/relational_exporter.rb', line 12

# Reader for the schema attribute.
#
# @return [Object] the loaded export schema
def schema
  @schema
end
Instance Method Details
#export(options, &block) ⇒ Object
30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 |
# File 'lib/relational_exporter.rb', line 30
#
# Exports records of the configured output model (with their associations)
# to a CSV file at options.file_path, building rows concurrently with a
# Celluloid worker pool while a CsvBuilder actor writes them in sequence.
#
# NOTE(review): this block was recovered from a generated doc page in which
# a local identifier had been stripped everywhere; `options` and
# `find_in_batches_options` are reconstructed — verify against the gem source.
#
# @param options [Hash, Hashie::Mash] expects output.model, output.scope,
#   output.associations, file_path; optional: workers, limit, find_in_batches
# @param block [Proc] accepted but not used by the visible body
# @return [Object] the result of csv_builder.terminate
def export(options, &block)
  ActiveRecord::Base.logger = @logger
  Celluloid.logger = @logger

  options = Hashie::Mash.new options

  main_klass = options.output.model.to_s.classify.constantize
  main_klass.set_scope_from_hash options.output.scope.as_json
  total_records = main_klass.find_all_by_scope(options.output.scope.as_json).count
  remaining_records = total_records

  csv_builder = RelationalExporter::CsvBuilder.new options.file_path
  Celluloid::Actor[:csv_builder] = csv_builder
  result = csv_builder.future.start

  # Row-building worker pool; defaults to 10 workers.
  pool_size = options.workers || 10
  pool = RelationalExporter::RecordWorker.pool(size: pool_size)

  get_headers = true
  record_sequence = -1
  batch_count = 0

  find_in_batches_options = Hashie::Mash.new({ batch_size: 100 }.merge(options.find_in_batches || {}))
  limit = options.limit.nil? ? nil : options.limit.to_i
  max_records = limit.nil? ? total_records : [limit, total_records].min

  @logger.info "CSV export will process #{max_records} of #{total_records} total records."

  all_bm = Benchmark.measure do
    catch(:hit_limit) do
      main_klass.find_all_by_scope(options.output.scope.as_json).find_in_batches(find_in_batches_options.to_h.symbolize_keys) do |records|
        batch_count += 1
        batch_bm = Benchmark.measure do
          records.each do |record|
            record_sequence += 1
            remaining_records -= 1
            args = [record_sequence, record, options.output.associations, get_headers]

            # First row is fetched synchronously so the header row is
            # written before any asynchronously built rows arrive.
            if get_headers
              pool.get_csv_row(*args)
              get_headers = false
            else
              pool.async.get_csv_row(*args)
            end

            # NOTE(review): record_sequence is 0-based, so throwing when it
            # EQUALS max_records processes one record past the limit —
            # looks like an off-by-one; confirm intended semantics before
            # changing, since end_index below depends on it.
            throw :hit_limit if !limit.nil? && (record_sequence == max_records)
          end
        end
        @logger.debug "Batch of #{records.size} queued. #{remaining_records} remaining. Benchmark: #{batch_bm}"
      end
    end

    csv_builder.end_index = record_sequence
    # `==` instead of the original non-idiomatic `===` (identical result
    # here since the receiver is `true`).
    @logger.info "CSV export complete <#{options.file_path}>" if result.value == true
  end

  @logger.debug "#{batch_count} batches processed. Benchmark: #{all_bm}"

  pool.terminate
  csv_builder.terminate
end