Class: RightDevelop::S3::Interface
- Inherits:
-
Object
- Object
- RightDevelop::S3::Interface
- Defined in:
- lib/right_develop/s3/interface.rb
Overview
Provides a Ruby OOP interface to Amazon S3.
Note: the :filters option is accepted by several storage actions below and refers to an array of Regexps or wildcard-style filter strings (e.g. '*.txt'). Filters are used to match file paths relative to a given subdirectory, or else from the root of the bucket (or of the directory on disk).
Constant Summary collapse
- NO_SLASHES_REGEXP =
/^[^\/]+$/
- DEFAULT_OPTIONS =
{ :filters => nil, :subdirectory => nil, :recursive => true, :aws_access_key_id => nil, :aws_secret_access_key => nil, :logger => nil }.freeze
Instance Attribute Summary collapse
-
#logger ⇒ Object
Returns the value of attribute logger.
Instance Method Summary collapse
-
#delete_files(bucket, options = {}) ⇒ Fixnum
Deletes all files from the given bucket.
-
#download_files(bucket, to_dir_path, options = {}) ⇒ Fixnum
Downloads all files from the given bucket to the given directory.
-
#initialize(options = {}) ⇒ Interface
constructor
A new instance of Interface.
-
#list_files(bucket, options = {}) ⇒ Array
Lists the files in the given bucket.
-
#upload_files(bucket, from_dir_path, options = {}) ⇒ Fixnum
Uploads all files from the given directory (ignoring any empty directories) to the given bucket.
Constructor Details
#initialize(options = {}) ⇒ Interface
Returns a new instance of Interface.
49 50 51 52 53 54 55 56 57 58 59 60 61 |
# File 'lib/right_develop/s3/interface.rb', line 49

# Creates a new S3 interface wrapper.
#
# @param [Hash] options see DEFAULT_OPTIONS for details
# @option options [String] :aws_access_key_id required AWS credential
# @option options [String] :aws_secret_access_key required AWS credential
# @option options [Logger] :logger to use (defaults to a STDOUT logger)
#
# @raise [ArgumentError] if either mandatory credential option is missing
def initialize(options = {})
  options = DEFAULT_OPTIONS.merge(options)
  aws_access_key_id = options[:aws_access_key_id]
  aws_secret_access_key = options[:aws_secret_access_key]
  unless aws_access_key_id && aws_secret_access_key
    raise ::ArgumentError,
          'Missing one or both mandatory options - :aws_access_key_id and :aws_secret_access_key'
  end
  @logger = options[:logger] || Logger.new(STDOUT)
  @s3 = ::RightAws::S3Interface.new(
    aws_access_key_id, aws_secret_access_key, :logger => @logger)
end
Instance Attribute Details
#logger ⇒ Object
Returns the value of attribute logger.
63 64 65 |
# File 'lib/right_develop/s3/interface.rb', line 63

# @return [Logger] the logger supplied at construction time (or the
#   default STDOUT logger created by #initialize)
def logger
  @logger
end
Instance Method Details
#delete_files(bucket, options = {}) ⇒ Fixnum
Deletes all files from the given bucket.
199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 |
# File 'lib/right_develop/s3/interface.rb', line 199

# Deletes all matching files from the given bucket (optionally limited
# to a :subdirectory prefix and/or :filters — see DEFAULT_OPTIONS).
#
# @param [String] bucket to delete from
# @param [Hash] options see DEFAULT_OPTIONS for details
#
# @return [Fixnum] count of deleted files
def delete_files(bucket, options = {})
  options = DEFAULT_OPTIONS.dup.merge(options)
  prefix = normalize_subdirectory_path(options[:subdirectory])
  files = list_files(bucket, options)
  if files.empty?
    logger.info("No files found in \"#{bucket}/#{prefix}\"")
  else
    logger.info("Deleting #{files.count} files...")
    files.each do |path|
      # list_files returns paths relative to prefix; re-prepend it for the key.
      @s3.delete(bucket, "#{prefix}#{path}")
      logger.info("Deleted \"#{bucket}/#{prefix}#{path}\"")
    end
  end
  return files.size
end
#download_files(bucket, to_dir_path, options = {}) ⇒ Fixnum
Downloads all files from the given bucket to the given directory.
108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 |
# File 'lib/right_develop/s3/interface.rb', line 108

# Downloads all matching files from the given bucket to the given
# directory, creating parent directories on disk as needed. Files whose
# local MD5 already matches the remote ETag are skipped.
#
# @param [String] bucket to download from
# @param [String] to_dir_path local directory to download into
# @param [Hash] options see DEFAULT_OPTIONS for details
#
# @return [Fixnum] count of files actually downloaded (skips excluded)
def download_files(bucket, to_dir_path, options = {})
  options = DEFAULT_OPTIONS.dup.merge(options)
  prefix = normalize_subdirectory_path(options[:subdirectory])
  files = list_files(bucket, options)
  downloaded = 0  # initialized here so the empty-bucket path returns 0, not nil
  if files.empty?
    logger.info("No files found in \"#{bucket}/#{prefix}\"")
  else
    logger.info("Downloading #{files.count} files...")
    files.each do |path|
      key = "#{prefix}#{path}"
      to_file_path = File.join(to_dir_path, path)
      parent_path = File.dirname(to_file_path)
      FileUtils.mkdir_p(parent_path) unless File.directory?(parent_path)

      # Compare local MD5 against the remote ETag (hex digits only) to
      # avoid re-downloading identical content. NOTE(review): ETag equals
      # the MD5 only for non-multipart uploads — confirm acceptable here.
      disk_file = to_file_path
      file_md5 = File.exist?(disk_file) && Digest::MD5.hexdigest(File.read(disk_file))
      if file_md5
        head = @s3.head(bucket, key) rescue nil
        key_md5 = head && head['etag'].gsub(/[^0-9a-fA-F]/, '')
        skip = (key_md5 == file_md5)
      end
      if skip
        logger.info("Skipping #{bucket}/#{key} (identical contents)")
      else
        logger.info("Downloading #{bucket}/#{key}")
        ::File.open(to_file_path, 'wb') do |f|
          @s3.get(bucket, key) { |chunk| f.write(chunk) }
        end
        downloaded += 1
      end
      # NOTE(review): logged even for skipped files; preserved as-is.
      logger.info("Downloaded to \"#{to_file_path}\"")
    end
  end
  downloaded
end
#list_files(bucket, options = {}) ⇒ Array
Lists the files in the given bucket.
72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 |
# File 'lib/right_develop/s3/interface.rb', line 72

# Lists the files in the given bucket, relative to the optional
# :subdirectory prefix and matching any :filters (see DEFAULT_OPTIONS).
# When all filters are plain (non-Regexp) strings, existence is probed
# per filename via HEAD instead of listing the whole bucket.
#
# @param [String] bucket to list
# @param [Hash] options see DEFAULT_OPTIONS for details
#
# @return [Array] list of relative file paths
def list_files(bucket, options = {})
  options = DEFAULT_OPTIONS.dup.merge(options)
  prefix = normalize_subdirectory_path(options[:subdirectory])
  filters = normalize_filters(options)
  files = []
  trivial_filters = filters.select { |filter| filter.is_a?(String) }
  if trivial_filters.empty?
    @s3.incrementally_list_bucket(bucket, 'prefix' => prefix) do |response|
      incremental_files = response[:contents].map do |details|
        # strip the prefix so returned paths are relative to it
        details[:key][(prefix.length)..-1]
      end
      files += filter_files(incremental_files, filters)
    end
  else
    trivial_filters.each do |filename|
      begin
        # use head to query file existence.
        @s3.head(bucket, "#{prefix}#{filename}")
        files << filename
      rescue RightAws::AwsError => e
        # do nothing if file not found
        raise unless '404' == e.http_code
      end
    end
  end
  return files
end
#upload_files(bucket, from_dir_path, options = {}) ⇒ Fixnum
Uploads all files from the given directory (ignoring any empty directories) to the given bucket.
160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 |
# File 'lib/right_develop/s3/interface.rb', line 160

# Uploads all files from the given directory (ignoring any empty
# directories) to the given bucket. Files whose MD5 already matches the
# remote ETag are skipped.
#
# @param [String] bucket to upload to
# @param [String] from_dir_path local directory to upload from
# @param [Hash] options see DEFAULT_OPTIONS for details
#
# @return [Fixnum] count of files actually uploaded (skips excluded)
def upload_files(bucket, from_dir_path, options = {})
  Dir.chdir(from_dir_path) do
    logger.info("Working in #{Dir.pwd.inspect}")
    options = DEFAULT_OPTIONS.dup.merge(options)
    prefix = normalize_subdirectory_path(options[:subdirectory])
    filters = normalize_filters(options)
    pattern = options[:recursive] ? '**/*' : '*'
    files = Dir.glob(pattern).select { |path| File.file?(path) }
    # NOTE(review): return value discarded — presumably filter_files
    # mutates `files` in place; verify against its implementation.
    filter_files(files, filters)
    access = normalize_access(options)
    uploaded = 0
    files.each do |path|
      key = "#{prefix}#{path}"
      # Skip upload when the remote ETag (hex digits only) already matches
      # the local MD5; ETag equals MD5 only for non-multipart uploads.
      file_md5 = Digest::MD5.hexdigest(File.read(path))
      File.open(path, 'rb') do |f|
        head = @s3.head(bucket, key) rescue nil
        key_md5 = head && head['etag'].gsub(/[^0-9a-fA-F]/, '')
        if file_md5 == key_md5
          logger.info("Skipping #{bucket}/#{key} (identical contents)")
        else
          logger.info("Uploading to #{bucket}/#{key}")
          @s3.put(bucket, key, f, 'x-amz-acl' => access)
          uploaded += 1
        end
      end
    end
    uploaded
  end
end