Class: RedshiftConnector::Connector
- Inherits: Object
- Ancestry: Object → RedshiftConnector::Connector
- Defined in:
- lib/redshift_connector/connector.rb
Constant Summary
- DEFAULT_BATCH_SIZE = 1000
Class Method Summary
- .transport_all(strategy: 'rename', schema:, table: nil, src_table: table, dest_table: table, columns:, bucket: nil, txn_id: nil, filter: nil, enable_cast: false, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
- .transport_all_from_s3(strategy: 'rename', table:, columns:, bucket: nil, prefix:, format:, filter: nil, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
- .transport_delta(schema:, table: nil, src_table: table, dest_table: table, condition:, columns:, delete_cond: nil, upsert_columns: nil, bucket: nil, txn_id: nil, filter: nil, enable_cast: false, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
- .transport_delta_from_s3(bucket: nil, prefix:, format:, filter: nil, table:, columns:, delete_cond: nil, upsert_columns: nil, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
Instance Method Summary
- #execute ⇒ Object
- #export ⇒ Object
- #export_enabled? ⇒ Boolean
- #import ⇒ Object
- #import_enabled? ⇒ Boolean
-
#initialize(exporter:, importer:, filter: nil, logger:) ⇒ Connector
constructor
A new instance of Connector.
Constructor Details
#initialize(exporter:, importer:, filter: nil, logger:) ⇒ Connector
Returns a new instance of Connector.
(Source: lib/redshift_connector/connector.rb, lines 160–166)
# File 'lib/redshift_connector/connector.rb', line 160

# Builds a connector from an already-constructed exporter/importer pair.
#
# @param exporter [Object] object whose #execute produces the data file bundle
# @param importer [Object] object whose #execute consumes a bundle reader
# @param filter [Proc, nil] optional row filter, handed to the bundle reader at import time
# @param logger [Logger] destination for progress messages
def initialize(exporter:, importer:, filter: nil, logger:)
  @exporter = exporter
  @importer = importer
  @filter   = filter
  @logger   = logger
  # Populated by #export, read by #import.
  @bundle   = nil
end
Class Method Details
.transport_all(strategy: 'rename', schema:, table: nil, src_table: table, dest_table: table, columns:, bucket: nil, txn_id: nil, filter: nil, enable_cast: false, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
(Source: lib/redshift_connector/connector.rb, lines 118–158)
# File 'lib/redshift_connector/connector.rb', line 118

# Builds a connector that exports a whole table from Redshift (via S3)
# and rebuilds the destination table from it.
#
# Exactly one of filter / enable_cast must be given: filter transforms rows
# in this process, enable_cast pushes the casting into the export query.
#
# @return [Connector]
def Connector.transport_all(
  strategy: 'rename',
  schema:,
  table: nil,
  src_table: table,
  dest_table: table,
  columns:,
  bucket: nil,
  txn_id: nil,
  filter: nil,
  enable_cast: false,
  logger: RedshiftConnector.logger,
  quiet: false
)
  # Same guard as transport_delta: caller must supply :table, or both
  # :src_table and :dest_table, so we fail fast instead of passing nil
  # table names down into the exporter/importer.
  unless src_table && dest_table
    raise ArgumentError, "missing :table, :src_table or :dest_table"
  end
  # && / || instead of and / or: identical logic, safer precedence.
  raise ArgumentError, "filter and enable_cast are exclusive" if filter && enable_cast
  raise ArgumentError, "either filter or enable_cast is required" unless filter || enable_cast
  logger = NullLogger.new if quiet
  bundle_params = DataFileBundleParams.new(
    bucket: bucket,
    schema: schema,
    table: src_table,
    txn_id: txn_id,
    logger: logger
  )
  exporter = Exporter.for_table(
    bundle_params: bundle_params,
    schema: schema,
    table: src_table,
    columns: columns,
    enable_cast: enable_cast,
    logger: logger
  )
  importer = Importer.for_rebuild(
    strategy: strategy,
    table: dest_table,
    columns: columns,
    logger: logger
  )
  new(exporter: exporter, importer: importer, filter: filter, logger: logger)
end
.transport_all_from_s3(strategy: 'rename', table:, columns:, bucket: nil, prefix:, format:, filter: nil, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
(Source: lib/redshift_connector/connector.rb, lines 90–116)
# File 'lib/redshift_connector/connector.rb', line 90

# Builds a connector that reads files already present under an S3 prefix
# (no Redshift export step) and rebuilds the destination table from them.
#
# @return [Connector]
def Connector.transport_all_from_s3(
  strategy: 'rename',
  table:,
  columns:,
  bucket: nil,
  prefix:,
  format:,
  filter: nil,
  logger: RedshiftConnector.logger,
  quiet: false
)
  logger = NullLogger.new if quiet
  # Named bucket when given, otherwise the configured default.
  s3_bucket = bucket ? S3Bucket.get(bucket) : S3Bucket.default
  bundle = S3DataFileBundle.for_prefix(
    bucket: s3_bucket,
    prefix: prefix,
    format: format,
    logger: logger
  )
  new(
    exporter: ImmediateExporter.new(bundle: bundle, logger: logger),
    importer: Importer.for_rebuild(
      strategy: strategy,
      table: table,
      columns: columns,
      logger: logger
    ),
    filter: filter,
    logger: logger
  )
end
.transport_delta(schema:, table: nil, src_table: table, dest_table: table, condition:, columns:, delete_cond: nil, upsert_columns: nil, bucket: nil, txn_id: nil, filter: nil, enable_cast: false, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
(Source: lib/redshift_connector/connector.rb, lines 41–88)
# File 'lib/redshift_connector/connector.rb', line 41

# Builds a connector that exports only rows matching +condition+ from
# Redshift (via S3) and upserts them into the destination table.
#
# Exactly one of filter / enable_cast must be given: filter transforms rows
# in this process, enable_cast pushes the casting into the export query.
#
# @return [Connector]
def Connector.transport_delta(
  schema:,
  table: nil,
  src_table: table,
  dest_table: table,
  condition:,
  columns:,
  delete_cond: nil,
  upsert_columns: nil,
  bucket: nil,
  txn_id: nil,
  filter: nil,
  enable_cast: false,
  logger: RedshiftConnector.logger,
  quiet: false
)
  # Caller must supply :table, or both :src_table and :dest_table.
  unless src_table && dest_table
    raise ArgumentError, "missing :table, :src_table or :dest_table"
  end
  # && / || instead of and / or: identical logic, safer precedence.
  raise ArgumentError, "filter and enable_cast are exclusive" if filter && enable_cast
  raise ArgumentError, "either filter or enable_cast is required" unless filter || enable_cast
  logger = NullLogger.new if quiet
  bundle_params = DataFileBundleParams.new(
    bucket: bucket,
    schema: schema,
    table: src_table,
    txn_id: txn_id,
    logger: logger
  )
  exporter = Exporter.for_table_delta(
    bundle_params: bundle_params,
    schema: schema,
    table: src_table,
    columns: columns,
    condition: condition,
    enable_cast: enable_cast,
    logger: logger
  )
  importer = Importer.for_delta_upsert(
    table: dest_table,
    columns: columns,
    delete_cond: delete_cond,
    upsert_columns: upsert_columns,
    logger: logger
  )
  new(exporter: exporter, importer: importer, filter: filter, logger: logger)
end
.transport_delta_from_s3(bucket: nil, prefix:, format:, filter: nil, table:, columns:, delete_cond: nil, upsert_columns: nil, logger: RedshiftConnector.logger, quiet: false) ⇒ Object
(Source: lib/redshift_connector/connector.rb, lines 11–39)
# File 'lib/redshift_connector/connector.rb', line 11

# Builds a connector that upserts rows from files already present under an
# S3 prefix; no Redshift export step is performed.
#
# @return [Connector]
def Connector.transport_delta_from_s3(
  bucket: nil,
  prefix:,
  format:,
  filter: nil,
  table:,
  columns:,
  delete_cond: nil,
  upsert_columns: nil,
  logger: RedshiftConnector.logger,
  quiet: false
)
  logger = NullLogger.new if quiet
  # Named bucket when given, otherwise the configured default.
  s3_bucket = bucket ? S3Bucket.get(bucket) : S3Bucket.default
  bundle = S3DataFileBundle.for_prefix(
    bucket: s3_bucket,
    prefix: prefix,
    format: format,
    logger: logger
  )
  new(
    exporter: ImmediateExporter.new(bundle: bundle, logger: logger),
    importer: Importer.for_delta_upsert(
      table: table,
      columns: columns,
      delete_cond: delete_cond,
      upsert_columns: upsert_columns,
      logger: logger
    ),
    filter: filter,
    logger: logger
  )
end
Instance Method Details
#execute ⇒ Object
(Source: lib/redshift_connector/connector.rb, lines 176–179)
# File 'lib/redshift_connector/connector.rb', line 176

# Runs the export step and then the import step, each skippable via the
# IMPORT_ONLY / EXPORT_ONLY environment switches (see the *_enabled? predicates).
def execute
  if export_enabled?
    export
  end
  if import_enabled?
    import
  end
end
#export ⇒ Object
(Source: lib/redshift_connector/connector.rb, lines 181–184)
# File 'lib/redshift_connector/connector.rb', line 181

# Runs the exporter and keeps the resulting bundle for #import to consume.
def export
  @logger.info("==== export task ==================================================")
  bundle = @exporter.execute
  @bundle = bundle
end
#export_enabled? ⇒ Boolean
(Source: lib/redshift_connector/connector.rb, lines 168–170)
# File 'lib/redshift_connector/connector.rb', line 168

# The export step runs unless the IMPORT_ONLY environment variable is set
# (any non-nil value disables export).
def export_enabled?
  !ENV['IMPORT_ONLY']
end
#import ⇒ Object
(Source: lib/redshift_connector/connector.rb, lines 188–197)
# File 'lib/redshift_connector/connector.rb', line 188

# Wraps the bundle produced by #export in a batching reader (applying the
# optional row filter) and hands it to the importer.
def import
  @logger.info "==== import task =================================================="
  reader = DataFileBundleReader.new(
    @bundle,
    filter: @filter,
    batch_size: DEFAULT_BATCH_SIZE,
    logger: @logger
  )
  @importer.execute(reader)
end
#import_enabled? ⇒ Boolean
(Source: lib/redshift_connector/connector.rb, lines 172–174)
# File 'lib/redshift_connector/connector.rb', line 172

# The import step runs unless the EXPORT_ONLY environment variable is set
# (any non-nil value disables import).
def import_enabled?
  !ENV['EXPORT_ONLY']
end