Class: Taps::Push
Instance Attribute Summary
Attributes inherited from Operation
#database_url, #opts, #remote_url, #session_uri
Instance Method Summary
Methods inherited from Operation
#apply_table_filter, #catch_errors, #close_session, #completed_tables, #compression_disabled?, #db, #default_chunksize, #exclude_tables, #exiting?, factory, #format_number, #http_headers, #indexes_first?, #initialize, #log, #resuming?, #safe_database_url, #safe_remote_url, #safe_url, #server, #session_resource, #set_session, #setup_signal_trap, #skip_schema?, #store_session, #stream_state, #stream_state=, #table_filter, #verify_server
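A Taps::Push operation streams a local database up to a remote taps server. A minimal usage sketch, assuming the #initialize signature inherited from Operation (database_url, remote_url, opts) and placeholder URLs; the :default_chunksize key is an assumption based on the inherited #default_chunksize helper:

require 'taps/operation'

# Both URLs are hypothetical; adjust to your local database and taps server.
op = Taps::Push.new(
  'postgres://localhost/myapp_development',
  'http://user:password@taps.example.com:5000',
  :default_chunksize => 1000
)
op.run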
Instance Method Details
#fetch_local_tables_info ⇒ Object
# File 'lib/taps/operation.rb', line 567
def fetch_local_tables_info
  tables_with_counts = {}
  db.tables.each do |table|
    tables_with_counts[table] = db[table.to_sym.identifier].count
  end
  apply_table_filter(tables_with_counts)
end
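For orientation, a standalone Sequel sketch of the same counting idea; the in-memory SQLite database below is purely illustrative and not part of Taps:

require 'sequel'

db = Sequel.sqlite  # in-memory database, illustration only
db.create_table(:users) { primary_key :id }
db[:users].insert

counts = db.tables.each_with_object({}) { |t, h| h[t] = db[t].count }
counts  #=> {:users=>1}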
#file_prefix ⇒ Object
# File 'lib/taps/operation.rb', line 396
def file_prefix
  "push"
end
#local_tables_info ⇒ Object
# File 'lib/taps/operation.rb', line 550
def local_tables_info
  opts[:local_tables_info] ||= fetch_local_tables_info
end
#push_data ⇒ Object
# File 'lib/taps/operation.rb', line 466
def push_data
  puts "Sending data"
  puts "#{tables.size} tables, #{format_number(record_count)} records"

  tables.each do |table_name, count|
    stream = Taps::DataStream.factory(db,
      :table_name => table_name,
      :chunksize => default_chunksize)
    progress = ProgressBar.new(table_name.to_s, count)
    push_data_from_table(stream, progress)
  end
end
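For orientation: #tables (documented below) returns the remaining tables mapped to their row counts, so each pass builds one DataStream and one ProgressBar per table. The counts below are invented purely for illustration:

tables  #=> { "users" => 15000, "posts" => 3200 }  (hypothetical values)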
#push_data_from_table(stream, progress) ⇒ Object
# File 'lib/taps/operation.rb', line 480
def push_data_from_table(stream, progress)
  loop do
    if exiting?
      store_session
      exit 0
    end

    row_size = 0
    chunksize = stream.state[:chunksize]

    begin
      chunksize = Taps::Utils.calculate_chunksize(chunksize) do |c|
        stream.state[:chunksize] = c.to_i
        encoded_data, row_size, elapsed_time = nil
        d1 = c.time_delta do
          encoded_data, row_size, elapsed_time = stream.fetch
        end
        break if stream.complete?

        data = nil
        d2 = c.time_delta do
          data = {
            :state => stream.to_hash,
            :checksum => Taps::Utils.checksum(encoded_data).to_s
          }
        end

        begin
          content, content_type = nil
          d3 = c.time_delta do
            content, content_type = Taps::Multipart.create do |r|
              r.attach :name => :encoded_data,
                :payload => encoded_data,
                :content_type => 'application/octet-stream'
              r.attach :name => :json,
                :payload => OkJson.encode(data),
                :content_type => 'application/json'
            end
          end
          session_resource['push/table'].post(content, http_headers(:content_type => content_type))
          self.stream_state = stream.to_hash
        rescue => e
          Taps::Utils.reraise_server_exception(e)
        end

        c.idle_secs = (d1 + d2 + d3)

        elapsed_time
      end
    rescue Taps::CorruptedData => e
      # the chunk was corrupted in transit; retry the same data
      next
    rescue Taps::DuplicatePrimaryKeyError => e
      # verify the stream state against the server, then retry
      stream = stream.verify_remote_stream(session_resource['push/verify_stream'], http_headers)
      next
    end

    stream.state[:chunksize] = chunksize

    progress.inc(row_size)

    stream.increment(row_size)
    break if stream.complete?
  end

  progress.finish
  completed_tables << stream.table_name.to_s
  self.stream_state = {}
end
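The loop pairs each encoded chunk with a checksum so the server can detect a corrupted upload, raise Taps::CorruptedData, and have the same chunk retried. A minimal standalone sketch of that idea, using Zlib.crc32 in place of Taps::Utils.checksum (whose exact algorithm is not shown on this page):

require 'zlib'

chunk   = "id,name\n1,alice\n2,bob\n"  # stands in for encoded_data
payload = { :data => chunk, :checksum => Zlib.crc32(chunk).to_s }

# receiving side: recompute and compare before applying the chunk
raise "corrupted chunk, retry" unless Zlib.crc32(payload[:data]).to_s == payload[:checksum]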
#push_indexes ⇒ Object
# File 'lib/taps/operation.rb', line 418
def push_indexes
  idxs = OkJson.decode(Taps::Utils.schema_bin(:indexes_individual, database_url))

  return unless idxs.size > 0

  puts "Sending indexes"

  apply_table_filter(idxs).each do |table, indexes|
    next unless indexes.size > 0
    progress = ProgressBar.new(table, indexes.size)
    indexes.each do |idx|
      session_resource['push/indexes'].post(idx, http_headers(:content_type => 'application/json'))
      progress.inc(1)
    end
    progress.finish
  end
end
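For illustration, the decoded idxs value is a hash of table names to collections of index definitions, which is what apply_table_filter iterates. The exact payload produced by Taps::Utils.schema_bin(:indexes_individual, ...) is not shown on this page, so the shape below is an assumption:

idxs = {
  "users" => ["<index definition 1>", "<index definition 2>"],
  "posts" => ["<index definition 3>"]
}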
#push_partial_data ⇒ Object
# File 'lib/taps/operation.rb', line 455
def push_partial_data
  return if stream_state == {}

  table_name = stream_state[:table_name]
  record_count = tables[table_name.to_s]
  puts "Resuming #{table_name}, #{format_number(record_count)} records"
  progress = ProgressBar.new(table_name.to_s, record_count)
  stream = Taps::DataStream.factory(db, stream_state)
  push_data_from_table(stream, progress)
end
#push_reset_sequences ⇒ Object
# File 'lib/taps/operation.rb', line 449
def push_reset_sequences
  puts "Resetting sequences"

  session_resource['push/reset_sequences'].post('', http_headers)
end
#push_schema ⇒ Object
# File 'lib/taps/operation.rb', line 436
def push_schema
  puts "Sending schema"

  progress = ProgressBar.new('Schema', tables.size)
  tables.each do |table, count|
    schema_data = Taps::Utils.schema_bin(:dump_table, database_url, table)
    log.debug "Table: #{table}\n#{schema_data}\n"
    session_resource['push/schema'].post(schema_data, http_headers(:content_type => 'text/plain'))
    progress.inc(1)
  end
  progress.finish
end
#record_count ⇒ Object
# File 'lib/taps/operation.rb', line 563
def record_count
  @record_count ||= local_tables_info.values.inject(0) { |a,c| a += c }
end
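Equivalent arithmetic with made-up counts: the inject simply sums the per-table totals cached by #local_tables_info.

{ "users" => 15000, "posts" => 3200 }.values.inject(0) { |a, c| a + c }  #=> 18200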
#run ⇒ Object
# File 'lib/taps/operation.rb', line 404
def run
  catch_errors do
    unless resuming?
      push_schema if !skip_schema?
      push_indexes if indexes_first? && !skip_schema?
    end
    setup_signal_trap
    push_partial_data if resuming?
    push_data
    push_indexes if !indexes_first? && !skip_schema?
    push_reset_sequences
  end
end
#tables ⇒ Object
# File 'lib/taps/operation.rb', line 554
def tables
  h = {}
  local_tables_info.each do |table_name, count|
    next if completed_tables.include?(table_name.to_s)
    h[table_name.to_s] = count
  end
  h
end
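A standalone restatement with hypothetical data, showing how already-completed tables are filtered out when a push resumes:

local_tables_info = { :users => 10, :posts => 4 }
completed_tables  = ["users"]

remaining = {}
local_tables_info.each do |table_name, count|
  next if completed_tables.include?(table_name.to_s)
  remaining[table_name.to_s] = count
end
remaining  #=> {"posts"=>4}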
#to_hash ⇒ Object
# File 'lib/taps/operation.rb', line 400
def to_hash
  super.merge(:local_tables_info => local_tables_info)
end