Class: Aws::S3Interface

Inherits:
AwsBase show all
Includes:
AwsBaseInterface
Defined in:
lib/s3/right_s3_interface.rb

Defined Under Namespace

Classes: S3AclParser, S3BucketLocationParser, S3CopyParser, S3HttpResponseBodyParser, S3HttpResponseHeadParser, S3HttpResponseParser, S3ImprovedListBucketParser, S3ListAllMyBucketsParser, S3ListBucketParser, S3LoggingParser

Constant Summary collapse

USE_100_CONTINUE_PUT_SIZE =
1_000_000
DEFAULT_HOST =
's3.amazonaws.com'
DEFAULT_PORT =
443
DEFAULT_PROTOCOL =
'https'
DEFAULT_SERVICE =
'/'
REQUEST_TTL =
30
DEFAULT_EXPIRES_AFTER =

One day’s worth of seconds

1 * 24 * 60 * 60
ONE_YEAR_IN_SECONDS =
365 * 24 * 60 * 60
AMAZON_HEADER_PREFIX =
'x-amz-'
AMAZON_METADATA_PREFIX =
'x-amz-meta-'
@@bench =
AwsBenchmarkingBlock.new

Constants included from AwsBaseInterface

AwsBaseInterface::DEFAULT_SIGNATURE_VERSION

Constants inherited from AwsBase

AwsBase::AMAZON_PROBLEMS

Instance Attribute Summary

Attributes included from AwsBaseInterface

#aws_access_key_id, #cache, #connection, #last_errors, #last_request, #last_request_id, #last_response, #logger, #params, #signature_version

Class Method Summary collapse

Instance Method Summary collapse

Methods included from AwsBaseInterface

#cache_hits?, caching, caching=, #caching?, #close_conn, #generate_request, #generate_request2, #get_conn, #hash_params, #init, #multi_thread, #on_exception, #request_cache_or_info, #request_info2, #request_info_impl, #request_info_xml_simple, #signed_service_params, #symbolize, #update_cache

Methods inherited from AwsBase

amazon_problems, amazon_problems=

Constructor Details

#initialize(aws_access_key_id = nil, aws_secret_access_key = nil, params = {}) ⇒ S3Interface

Creates new RightS3 instance.

s3 = Aws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', {:multi_thread => true, :logger => Logger.new('/tmp/x.log')}) #=> #<Aws::S3Interface:0xb7b3c27c>

Params is a hash:

{:server       => 's3.amazonaws.com'   # Amazon service host: 's3.amazonaws.com'(default)
 :port         => 443                  # Amazon service port: 80 or 443(default)
 :protocol     => 'https'              # Amazon service protocol: 'http' or 'https'(default)
 :multi_thread => true|false           # Multi-threaded (connection per each thread): true or false(default)
 :logger       => Logger Object}       # Logger instance: logs to STDOUT if omitted }


65
66
67
68
69
70
71
72
73
74
# File 'lib/s3/right_s3_interface.rb', line 65

# Creates a new S3 interface instance.
#
# Credentials fall back to the AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY
# environment variables when not supplied. When S3_URL is set, its
# host/port/path/scheme override the library defaults.
def initialize(aws_access_key_id=nil, aws_secret_access_key=nil, params={})
    # Parse S3_URL once instead of re-parsing it for every component.
    s3_url = ENV['S3_URL'] ? URI.parse(ENV['S3_URL']) : nil
    init({:name             => 'S3',
          :default_host     => s3_url ? s3_url.host   : DEFAULT_HOST,
          :default_port     => s3_url ? s3_url.port   : DEFAULT_PORT,
          :default_service  => s3_url ? s3_url.path   : DEFAULT_SERVICE,
          :default_protocol => s3_url ? s3_url.scheme : DEFAULT_PROTOCOL},
         aws_access_key_id || ENV['AWS_ACCESS_KEY_ID'],
         aws_secret_access_key || ENV['AWS_SECRET_ACCESS_KEY'],
         params)
end

Class Method Details

.bench_s3Object



48
49
50
# File 'lib/s3/right_s3_interface.rb', line 48

# Benchmarking block that accumulates time spent in S3 service calls.
def self.bench_s3
    @@bench.service
end

.bench_xmlObject



44
45
46
# File 'lib/s3/right_s3_interface.rb', line 44

# Benchmarking block that accumulates time spent parsing XML responses.
def self.bench_xml
    @@bench.xml
end

Instance Method Details

#bucket_location(bucket, headers = {}) ⇒ Object

Retrieve bucket location

s3.create_bucket('my-awesome-bucket-us')        #=> true
puts s3.bucket_location('my-awesome-bucket-us') #=> '' (Amazon's default value assumed)

s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
puts s3.bucket_location('my-awesome-bucket-eu')            #=> 'EU'


220
221
222
223
224
225
# File 'lib/s3/right_s3_interface.rb', line 220

# Returns the bucket's location constraint ('' for the US classic
# region, 'EU', etc.), or raises via on_exception.
def bucket_location(bucket, headers={})
    request = generate_rest_request('GET', headers.merge(:url => "#{bucket}?location"))
    request_info(request, S3BucketLocationParser.new)
rescue
    on_exception
end

#canonical_string(method, path, headers = {}, expires = nil) ⇒ Object


Requests

Produces canonical string for signing.



85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
# File 'lib/s3/right_s3_interface.rb', line 85

# Produces the canonical string to sign for S3 signature-v2 auth:
# method, content-md5, content-type, date (or expires), the sorted
# x-amz-* headers, then the resource path.
def canonical_string(method, path, headers={}, expires=nil) # :nodoc:
    s3_headers = {}
    # Keep only the headers that take part in the signature.
    # NOTE(review): value.join assumes array values, i.e. the hash shape
    # produced by Net::HTTPRequest#to_hash — plain string values would
    # break here; confirm against callers.
    headers.each do |key, value|
        key = key.downcase
        s3_headers[key] = value.join("").strip if key[/^#{AMAZON_HEADER_PREFIX}|^content-md5$|^content-type$|^date$/o]
    end
    s3_headers['content-type'] ||= ''
    s3_headers['content-md5']  ||= ''
    # x-amz-date takes precedence over the date header, per the S3 spec.
    s3_headers['date']           = '' if s3_headers.has_key? 'x-amz-date'
    # For query-string auth the Expires value stands in for the date.
    s3_headers['date']           = expires if expires
    # prepare output string
    out_string = "#{method}\n"
    s3_headers.sort { |a, b| a[0] <=> b[0] }.each do |key, value|
        out_string << (key[/^#{AMAZON_HEADER_PREFIX}/o] ? "#{key}:#{value}\n" : "#{value}\n")
    end
    # ignore everything after the question mark...
    out_string << path.gsub(/\?.*$/, '')
    # ...unless there is an acl or torrent parameter (sub-resources that
    # must be part of the signed resource)
    out_string << '?acl' if path[/[&?]acl($|&|=)/]
    out_string << '?torrent' if path[/[&?]torrent($|&|=)/]
    out_string << '?location' if path[/[&?]location($|&|=)/]
    out_string << '?logging' if path[/[&?]logging($|&|=)/] # this one is beta, no support for now
    out_string
end

#clear_bucket(bucket) ⇒ Object

Removes all keys from bucket. Returns true or an exception.

s3.clear_bucket('my_awesome_bucket') #=> true


770
771
772
773
774
775
776
777
# File 'lib/s3/right_s3_interface.rb', line 770

# Deletes every key in the bucket, page by page. Returns true or raises.
def clear_bucket(bucket)
    incrementally_list_bucket(bucket) do |batch|
        batch[:contents].each { |entry| delete(bucket, entry[:key]) }
    end
    true
rescue
    on_exception
end

#close_connectionObject



77
78
79
# File 'lib/s3/right_s3_interface.rb', line 77

# Closes the cached :s3_connection HTTP connection.
def close_connection
    close_conn(:s3_connection)
end

#copy(src_bucket, src_key, dest_bucket, dest_key = nil, directive = :copy, headers = {}) ⇒ Object

Copy an object.

directive: :copy    - copy meta-headers from source (default value)
           :replace - replace meta-headers by passed ones

# copy a key with meta-headers
s3.copy('b1', 'key1', 'b1', 'key1_copy') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:25:22.000Z"}

# copy a key, overwrite meta-headers
s3.copy('b1', 'key2', 'b1', 'key2_copy', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:26:22.000Z"}

see: docs.amazonwebservices.com/AmazonS3/2006-03-01/UsingCopyingObjects.html

http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTObjectCOPY.html


638
639
640
641
642
643
644
645
646
# File 'lib/s3/right_s3_interface.rb', line 638

# Copies an object. directive :copy keeps the source meta-headers,
# :replace substitutes the ones given in +headers+.
# Returns a hash like {:e_tag => ..., :last_modified => ...}.
def copy(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
    dest_key ||= src_key
    # The copy directives are written into the caller's headers hash,
    # matching the historical side effect of this method.
    headers['x-amz-metadata-directive'] = directive.to_s.upcase
    headers['x-amz-copy-source']        = "#{src_bucket}/#{CGI::escape src_key}"
    destination = "#{dest_bucket}/#{CGI::escape dest_key}"
    request_info(generate_rest_request('PUT', headers.merge(:url => destination)), S3CopyParser.new)
rescue
    on_exception
end

#create_bucket(bucket, headers = {}) ⇒ Object

Creates new bucket. Returns true or an exception.

# create a bucket at American server
s3.create_bucket('my-awesome-bucket-us') #=> true
# create a bucket at European server
s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true


200
201
202
203
204
205
206
207
208
209
210
# File 'lib/s3/right_s3_interface.rb', line 200

# Creates a new bucket, optionally with a location constraint
# (e.g. :location => :eu). Returns true or raises; a
# 'BucketAlreadyOwnedByYou' error is treated as success.
def create_bucket(bucket, headers={})
    data = nil
    unless headers[:location].blank?
        data = "<CreateBucketConfiguration><LocationConstraint>#{headers[:location].to_s}</LocationConstraint></CreateBucketConfiguration>"
    end
    req_hash = generate_rest_request('PUT', headers.merge(:url=>bucket, :data => data))
    request_info(req_hash, RightHttp2xxParser.new)
rescue => e
    # Rescue StandardError, not Exception: 'rescue Exception' would also
    # swallow SignalException / SystemExit / NoMemoryError.
    # If the bucket exists AWS returns an error for the location
    # constraint interface. Drop it.
    e.is_a?(Aws::AwsError) && e.message.include?('BucketAlreadyOwnedByYou') ? true : on_exception
end

Generates link for ‘CreateBucket’.

s3.create_bucket_link('my_awesome_bucket') #=> url string


857
858
859
860
861
# File 'lib/s3/right_s3_interface.rb', line 857

# Builds a signed query-string URL for 'CreateBucket'.
def create_bucket_link(bucket, expires=nil, headers={})
    generate_link('PUT', headers.merge(:url => bucket), expires)
rescue
    on_exception
end

#delete(bucket, key = '', headers = {}) ⇒ Object

Deletes key. Returns true or an exception.

s3.delete('my_awesome_bucket', 'log/curent/1.log') #=> true


618
619
620
621
622
623
# File 'lib/s3/right_s3_interface.rb', line 618

# Deletes a key from a bucket. Returns true or raises.
def delete(bucket, key='', headers={})
    target = "#{bucket}/#{CGI::escape key}"
    request_info(generate_rest_request('DELETE', headers.merge(:url => target)), RightHttp2xxParser.new)
rescue
    on_exception
end

#delete_bucket(bucket, headers = {}) ⇒ Object

Deletes a bucket. The bucket must be empty! Returns true or an exception.

s3.delete_bucket('my_awesome_bucket')  #=> true

See also: force_delete_bucket method



264
265
266
267
268
269
# File 'lib/s3/right_s3_interface.rb', line 264

# Deletes a bucket (which must already be empty). Returns true or raises.
# See force_delete_bucket for the non-empty case.
def delete_bucket(bucket, headers={})
    request = generate_rest_request('DELETE', headers.merge(:url => bucket))
    request_info(request, RightHttp2xxParser.new)
rescue
    on_exception
end

Generates link for ‘DeleteBucket’.

s3.delete_bucket_link('my_awesome_bucket') #=> url string


867
868
869
870
871
# File 'lib/s3/right_s3_interface.rb', line 867

# Builds a signed query-string URL for 'DeleteBucket'.
def delete_bucket_link(bucket, expires=nil, headers={})
    generate_link('DELETE', headers.merge(:url => bucket), expires)
rescue
    on_exception
end

#delete_folder(bucket, folder_key, separator = '/') ⇒ Object

Deletes all keys where the ‘folder_key’ may be assumed as ‘folder’ name. Returns an array of string keys that have been deleted.

s3.list_bucket('my_awesome_bucket').map{|key_data| key_data[:key]} #=> ['test','test/2/34','test/3','test1','test1/logs']
s3.delete_folder('my_awesome_bucket','test')                       #=> ['test','test/2/34','test/3']


795
796
797
798
799
800
801
802
803
804
805
806
# File 'lib/s3/right_s3_interface.rb', line 795

# Deletes every key for which +folder_key+ acts as a 'folder' prefix.
# Returns the deleted keys grouped per listing batch (array of arrays).
def delete_folder(bucket, folder_key, separator='/')
    # chomp! mutates the caller's string, matching historical behavior.
    folder_key.chomp!(separator)
    deleted = []
    incrementally_list_bucket(bucket, {'prefix' => folder_key}) do |batch|
        matching = batch[:contents].
                select { |item| item[:key][/^#{folder_key}($|#{separator}.*)/] }.
                map    { |item| item[:key] }
        matching.each { |key| delete(bucket, key) }
        deleted << matching
    end
    deleted
rescue
    on_exception
end

Generates link for ‘DeleteObject’.

s3.delete_link('my_awesome_bucket',key) #=> url string


926
927
928
929
930
# File 'lib/s3/right_s3_interface.rb', line 926

# Builds a signed query-string URL for 'DeleteObject'.
def delete_link(bucket, key, expires=nil, headers={})
    target = "#{bucket}/#{AwsUtils::URLencode key}"
    generate_link('DELETE', headers.merge(:url => target), expires)
rescue
    on_exception
end

#fetch_request_params(headers) ⇒ Object

:nodoc:



120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
# File 'lib/s3/right_s3_interface.rb', line 120

# Splits headers[:url] into bucket / key / query parts and decides
# between virtual-host style (bucket in the host name, for
# DNS-compatible bucket names) and path style addressing.
# Returns [server, path, path_to_sign].
def fetch_request_params(headers) #:nodoc:
    # default server to use
    server  = @params[:server]
    service = @params[:service].to_s
    service.chop! if service[%r{/$}] # remove trailing '/' from service
    # extract bucket name and check its DNS compatibility
    # ($1 = bucket, $2 = key path, $3 = query string)
    headers[:url].to_s[%r{^([a-z0-9._-]*)(/[^?]*)?(\?.+)?}i]
    bucket_name, key_path, params_list = $1, $2, $3
    # select request model
    if is_dns_bucket?(bucket_name)
        # virtual-host style: bucket becomes part of the host name
        server = "#{bucket_name}.#{server}"
        key_path ||= '/'
        path = "#{service}#{key_path}#{params_list}"
    else
        # path style: bucket stays in the request path
        path = "#{service}/#{bucket_name}#{key_path}#{params_list}"
    end
    # The string to sign always includes the bucket in the path,
    # regardless of the addressing model chosen above.
    path_to_sign = "#{service}/#{bucket_name}#{key_path}#{params_list}"
#      path_to_sign = "/#{bucket_name}#{key_path}#{params_list}"
    [server, path, path_to_sign]
end

#force_delete_bucket(bucket) ⇒ Object

Deletes all keys in bucket then deletes bucket. Returns true or an exception.

s3.force_delete_bucket('my_awesome_bucket')


783
784
785
786
787
788
# File 'lib/s3/right_s3_interface.rb', line 783

# Empties the bucket and then deletes it. Returns true or raises.
def force_delete_bucket(bucket)
    clear_bucket(bucket)
    delete_bucket(bucket)
rescue
    on_exception
end

Generates link for QUERY API



823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
# File 'lib/s3/right_s3_interface.rb', line 823

# Signs and generates a query-string-authenticated link (QUERY API).
#
# +expires+ may be absolute (epoch seconds or a Time) or, when it is an
# integer smaller than ONE_YEAR_IN_SECONDS, relative to now.
def generate_link(method, headers={}, expires=nil) #:nodoc:
    # calculate request data
    server, path, path_to_sign = fetch_request_params(headers)
    # expiration time
    expires ||= DEFAULT_EXPIRES_AFTER
    # Integer, not Fixnum: Fixnum was deprecated in Ruby 2.4 and removed
    # in Ruby 3.2 (Integer covers the same values on every version).
    expires   = Time.now.utc + expires if expires.is_a?(Integer) && (expires < ONE_YEAR_IN_SECONDS)
    expires   = expires.to_i
    # remove unset(==optional) and symbolic keys
    headers.each { |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
    # generate auth strings
    auth_string = canonical_string(method, path_to_sign, headers, expires)
    # OpenSSL::Digest.new replaces the long-deprecated
    # OpenSSL::Digest::Digest.new (removed in modern Ruby/OpenSSL).
    signature   = CGI::escape(Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest.new("sha1"), @aws_secret_access_key, auth_string)).strip)
    # path building
    addon = "Signature=#{signature}&Expires=#{expires}&AWSAccessKeyId=#{@aws_access_key_id}"
    path += path[/\?/] ? "&#{addon}" : "?#{addon}"
    "#{@params[:protocol]}://#{server}:#{@params[:port]}#{path}"
rescue
    on_exception
end

#generate_rest_request(method, headers) ⇒ Object

Generates request hash for REST API. Assumes that headers is URL encoded (use CGI::escape)



144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
# File 'lib/s3/right_s3_interface.rb', line 144

# Builds a signed Net::HTTP request hash for the REST API. Assumes any
# URL components in +headers+ are already URL encoded (use CGI::escape).
# Returns {:request, :server, :port, :protocol} for request_info.
def generate_rest_request(method, headers) # :nodoc:
    # calculate request data
    server, path, path_to_sign = fetch_request_params(headers)
    data = headers[:data]
    # remove unset(==optional) and symbolic keys
    headers.each { |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
    #
    headers['content-type'] ||= ''
    headers['date']           = Time.now.httpdate
    # create request (e.g. Net::HTTP::Put for 'PUT'; constantize is an
    # ActiveSupport extension)
    request      = "Net::HTTP::#{method.capitalize}".constantize.new(path)
    request.body = data if data
    # set request headers and meta headers
    headers.each { |key, value| request[key.to_s] = value }
    # generate auth strings from the request's own header hash, so the
    # signed values match exactly what will be sent
    auth_string = canonical_string(request.method, path_to_sign, request.to_hash)
    signature   = AwsUtils::sign(@aws_secret_access_key, auth_string)
    # set other headers
    request['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"
    # prepare output hash
    {:request  => request,
     :server   => server,
     :port     => @params[:port],
     :protocol => @params[:protocol]}
end

#get(bucket, key, headers = {}, &block) ⇒ Object

Retrieves object data from Amazon. Returns a hash or an exception.

s3.get('my_awesome_bucket', 'log/curent/1.log') #=>

    {:object  => "Ola-la!",
     :headers => {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
                  "content-type"      => "",
                  "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
                  "date"              => "Wed, 23 May 2007 09:08:03 GMT",
                  "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
                  "x-amz-meta-family" => "Woho556!",
                  "x-amz-request-id"  => "0000000C246D770C",
                  "server"            => "AmazonS3",
                  "content-length"    => "7"}}

If a block is provided, yields incrementally to the block as the response is read. For large responses, this function is ideal as the response can be ‘streamed’. The hash containing header fields is still returned. Example: foo = File.new(‘./chunder.txt’, File::CREAT|File::RDWR) rhdr = s3.get(‘aws-test’, ‘Cent5V1_7_1.img.part.00’) do |chunk|

foo.write(chunk)

end foo.close



521
522
523
524
525
526
# File 'lib/s3/right_s3_interface.rb', line 521

# Fetches an object. Returns {:object => body, :headers => {...}}.
# When a block is given the body is streamed to it in chunks instead of
# being accumulated in memory.
def get(bucket, key, headers={}, &block)
    url = "#{bucket}/#{CGI::escape key}"
    request_info(generate_rest_request('GET', headers.merge(:url => url)), S3HttpResponseBodyParser.new, &block)
rescue
    on_exception
end

#get_acl(bucket, key = '', headers = {}) ⇒ Object

Retrieves the ACL (access control policy) for a bucket or object. Returns a hash of headers and xml doc with ACL data. See: docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html.

s3.get_acl('my_awesome_bucket', 'log/curent/1.log') #=>
  {:headers => {"x-amz-id-2"=>"B3BdDMDUz+phFF2mGBH04E46ZD4Qb9HF5PoPHqDRWBv+NVGeA3TOQ3BkVvPBjgxX",
                "content-type"=>"application/xml;charset=ISO-8859-1",
                "date"=>"Wed, 23 May 2007 09:40:16 GMT",
                "x-amz-request-id"=>"B183FA7AB5FBB4DD",
                "server"=>"AmazonS3",
                "transfer-encoding"=>"chunked"},
   :object  => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner>
                <ID>16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Owner>
                <AccessControlList><Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\"><ID>
                16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Grantee>
                <Permission>FULL_CONTROL</Permission></Grant></AccessControlList></AccessControlPolicy>" }


689
690
691
692
693
694
695
# File 'lib/s3/right_s3_interface.rb', line 689

# Retrieves the ACL XML for a bucket (empty key) or object.
# Returns {:headers => {...}, :object => xml_string}.
def get_acl(bucket, key='', headers={})
    suffix = key.blank? ? '' : "/#{CGI::escape key}"
    request = generate_rest_request('GET', headers.merge(:url => "#{bucket}#{suffix}?acl"))
    request_info(request, S3HttpResponseBodyParser.new)
rescue
    on_exception
end

Generates link for ‘GetACL’.

s3.get_acl_link('my_awesome_bucket',key) #=> url string


937
938
939
940
941
# File 'lib/s3/right_s3_interface.rb', line 937

# Builds a signed query-string URL for 'GetACL'.
def get_acl_link(bucket, key='', headers={})
    generate_link('GET', headers.merge(:url => "#{bucket}/#{AwsUtils::URLencode key}?acl"))
rescue
    on_exception
end

#get_acl_parse(bucket, key = '', headers = {}) ⇒ Object

Retrieves the ACL (access control policy) for a bucket or object. Returns a hash of :grantees

s3.get_acl_parse('my_awesome_bucket', 'log/curent/1.log') #=>

{ :grantees=>
  { "16...2a"=>
    { :display_name=>"root",
      :permissions=>["FULL_CONTROL"],
      :attributes=>
       { "xsi:type"=>"CanonicalUser",
         "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}},
   "http://acs.amazonaws.com/groups/global/AllUsers"=>
     { :display_name=>"AllUsers",
       :permissions=>["READ"],
       :attributes=>
        { "xsi:type"=>"Group",
          "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}}},
 :owner=>
   { :id=>"16..2a",
     :display_name=>"root"}}


719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
# File 'lib/s3/right_s3_interface.rb', line 719

# Retrieves and parses the ACL for a bucket (empty key) or object.
# Returns {:owner => {...}, :grantees => {grantee_key => {...}}} where
# grantee_key is the grantee's canonical id or group URI.
def get_acl_parse(bucket, key='', headers={})
    key = key.blank? ? '' : "/#{CGI::escape key}"
    req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
    acl = request_info(req_hash, S3AclParser.new(:logger => @logger))
    result = {}
    result[:owner]    = acl[:owner]
    result[:grantees] = {}
    # Collapse one entry per <Grant> into one entry per grantee,
    # accumulating permissions.
    acl[:grantees].each do |grantee|
        key = grantee[:id] || grantee[:uri]
        if result[:grantees].key?(key)
            # NOTE(review): appends to the existing :permissions
            # collection via '<<'; assumes the parser yields
            # compatible types on both sides — confirm against S3AclParser.
            result[:grantees][key][:permissions] << grantee[:permissions]
        else
            result[:grantees][key] =
                    {:display_name => grantee[:display_name] || grantee[:uri].to_s[/[^\/]*$/],
                     :permissions  => grantee[:permissions].lines.to_a,
                     :attributes   => grantee[:attributes]}
        end
    end
    result
rescue
    on_exception
end

#get_bucket_acl(bucket, headers = {}) ⇒ Object

Retrieves the ACL (access control policy) for a bucket. Returns a hash of headers and xml doc with ACL data.



752
753
754
755
756
# File 'lib/s3/right_s3_interface.rb', line 752

# Retrieves the ACL for a bucket (delegates to get_acl with an empty key).
def get_bucket_acl(bucket, headers={})
    return get_acl(bucket, '', headers)
rescue
    on_exception
end

Generates link for ‘GetBucketACL’.

s3.get_bucket_acl_link('my_awesome_bucket') #=> url string


957
958
959
960
961
# File 'lib/s3/right_s3_interface.rb', line 957

# Builds a signed query-string URL for 'GetBucketACL' (delegates to
# get_acl_link with an empty key).
def get_bucket_acl_link(bucket, headers={})
    return get_acl_link(bucket, '', headers)
rescue
    on_exception
end

Generates link for ‘GetObject’.

if a bucket complies with the virtual hosting naming rules then returns a link with the bucket as a part of the host name:

s3.get_link('my-awesome-bucket',key) #=> https://my-awesome-bucket.s3.amazonaws.com:443/asia%2Fcustomers?Signature=nh7...

otherwise returns an old style link (the bucket is a part of path):

s3.get_link('my_awesome_bucket',key) #=> https://s3.amazonaws.com:443/my_awesome_bucket/asia%2Fcustomers?Signature=QAO...

see docs.amazonwebservices.com/AmazonS3/2006-03-01/VirtualHosting.html



906
907
908
909
910
# File 'lib/s3/right_s3_interface.rb', line 906

# Builds a signed query-string URL for 'GetObject'. DNS-compatible
# bucket names end up in the host name, others in the path.
def get_link(bucket, key, expires=nil, headers={})
    target = "#{bucket}/#{AwsUtils::URLencode key}"
    generate_link('GET', headers.merge(:url => target), expires)
rescue
    on_exception
end

#get_logging_parse(params) ⇒ Object

Retrieves the logging configuration for a bucket. Returns a hash of :targetbucket, :targetprefix

s3.interface.get_logging_parse(:bucket => “asset_bucket”)

=> {:enabled=>true, :targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/"}


234
235
236
237
238
239
240
241
242
# File 'lib/s3/right_s3_interface.rb', line 234

# Retrieves a bucket's logging configuration.
# Expects params {:bucket => name, :headers => optional hash} and
# returns e.g. {:enabled => true, :targetbucket => ..., :targetprefix => ...}.
def get_logging_parse(params)
    AwsUtils.mandatory_arguments([:bucket], params)
    AwsUtils.allow_only([:bucket, :headers], params)
    params[:headers] ||= {}
    request = generate_rest_request('GET', params[:headers].merge(:url => "#{params[:bucket]}?logging"))
    request_info(request, S3LoggingParser.new)
rescue
    on_exception
end

#get_object(bucket, key, headers = {}) ⇒ Object

Retrieves object data only (headers are omitted). Returns string or an exception.

s3.get('my_awesome_bucket', 'log/curent/1.log') #=> 'Ola-la!'


812
813
814
815
816
# File 'lib/s3/right_s3_interface.rb', line 812

# Returns only the object body as a string (response headers are dropped).
def get_object(bucket, key, headers={})
    get(bucket, key, headers)[:object]
rescue
    on_exception
end

#head(bucket, key, headers = {}) ⇒ Object

Retrieves object metadata. Returns a hash of http_response_headers.

s3.head('my_awesome_bucket', 'log/curent/1.log') #=>
  {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
   "content-type"      => "",
   "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
   "date"              => "Wed, 23 May 2007 09:08:03 GMT",
   "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
   "x-amz-meta-family" => "Woho556!",
   "x-amz-request-id"  => "0000000C246D770C",
   "server"            => "AmazonS3",
   "content-length"    => "7"}


607
608
609
610
611
612
# File 'lib/s3/right_s3_interface.rb', line 607

# Retrieves object metadata via a HEAD request. Returns the HTTP
# response headers as a hash.
def head(bucket, key, headers={})
    target = "#{bucket}/#{CGI::escape key}"
    request_info(generate_rest_request('HEAD', headers.merge(:url => target)), S3HttpResponseHeadParser.new)
rescue
    on_exception
end

Generates link for ‘HeadObject’.

s3.head_link('my_awesome_bucket',key) #=> url string


916
917
918
919
920
# File 'lib/s3/right_s3_interface.rb', line 916

# Builds a signed query-string URL for 'HeadObject'.
def head_link(bucket, key, expires=nil, headers={})
    target = "#{bucket}/#{AwsUtils::URLencode key}"
    generate_link('HEAD', headers.merge(:url => target), expires)
rescue
    on_exception
end

#incrementally_list_bucket(bucket, options = {}, headers = {}, &block) ⇒ Object

Incrementally list the contents of a bucket. Yields the following hash to a block:

s3.incrementally_list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) yields
 {
   :name => 'bucketname',
   :prefix => 'subfolder/',
   :marker => 'fileN.jpg',
   :max_keys => 234,
   :delimiter => '/',
   :is_truncated => true,
   :next_marker => 'fileX.jpg',
   :contents => [
     { :key => "file1",
       :last_modified => "2007-05-18T07:00:59.000Z",
       :e_tag => "000000000059075b964b07152d234b70",
       :size => 3,
       :storage_class => "STANDARD",
       :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
       :owner_display_name => "root"
     }, { :key, ...}, ... {:key, ...}
   ]
   :common_prefixes => [
     "prefix1",
     "prefix2",
     ...,
     "prefixN"
   ]
 }


322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
# File 'lib/s3/right_s3_interface.rb', line 322

# Lists a bucket page by page, yielding each parsed response hash to the
# block. Follows :is_truncated/markers until the listing is exhausted or
# the caller's 'max-keys' budget is used up. Returns true or raises.
def incrementally_list_bucket(bucket, options={}, headers={}, &block)
    internal_options = options.symbolize_keys
    begin
        # Rebuild the query string each iteration: the marker (and the
        # remaining max-keys budget) change between pages.
        internal_bucket = bucket.dup
        internal_bucket  += '?'+internal_options.map { |k, v| "#{k.to_s}=#{CGI::escape v.to_s}" }.join('&') unless internal_options.blank?
        req_hash = generate_rest_request('GET', headers.merge(:url=>internal_bucket))
        response = request_info(req_hash, S3ImprovedListBucketParser.new(:logger => @logger))
        there_are_more_keys = response[:is_truncated]
        if (there_are_more_keys)
            # decide_marker picks the next-page marker from the response.
            internal_options[:marker] = decide_marker(response)
            # Shrink the caller's max-keys budget by what this page returned.
            total_results = response[:contents].length + response[:common_prefixes].length
            internal_options[:'max-keys'] ? (internal_options[:'max-keys'] -= total_results) : nil
        end
        yield response
    end while there_are_more_keys && under_max_keys(internal_options)
    true
rescue
    on_exception
end

#is_dns_bucket?(bucket_name) ⇒ Boolean

Returns:

  • (Boolean)


111
112
113
114
115
116
117
118
# File 'lib/s3/right_s3_interface.rb', line 111

# Returns whether +bucket_name+ is compatible with virtual-host (DNS)
# style addressing: 3..63 characters, dot-separated components of
# lowercase letters, digits and dashes that start and end alphanumerically.
#
# Fixes over the previous version: returns false (not nil) on failure,
# and anchors with \A/\z so a multiline name cannot slip past the
# per-line ^/$ anchors.
def is_dns_bucket?(bucket_name)
    bucket_name = bucket_name.to_s
    return false unless (3..63).include?(bucket_name.size)
    bucket_name.split('.').all? do |component|
        !component[/\A[a-z0-9]([a-z0-9-]*[a-z0-9])?\z/].nil?
    end
end

#list_all_my_buckets(headers = {}) ⇒ Object

Returns an array of customer’s buckets. Each item is a hash.

s3.list_all_my_buckets #=>
  [{:owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
    :owner_display_name => "root",
    :name               => "bucket_name",
    :creation_date      => "2007-04-19T18:47:43.000Z"}, ..., {...}]


186
187
188
189
190
191
# File 'lib/s3/right_s3_interface.rb', line 186

# Lists the account's buckets. Returns an array of hashes with
# :owner_id, :owner_display_name, :name and :creation_date.
def list_all_my_buckets(headers={})
    request = generate_rest_request('GET', headers.merge(:url => ''))
    request_info(request, S3ListAllMyBucketsParser.new(:logger => @logger))
rescue
    on_exception
end

Generates link for ‘ListAllMyBuckets’.

s3.list_all_my_buckets_link #=> url string


847
848
849
850
851
# File 'lib/s3/right_s3_interface.rb', line 847

# Builds a signed query-string URL for 'ListAllMyBuckets'.
def list_all_my_buckets_link(expires=nil, headers={})
    generate_link('GET', headers.merge(:url => ''), expires)
rescue
    on_exception
end

#list_bucket(bucket, options = {}, headers = {}) ⇒ Object

Returns an array of bucket’s keys. Each array item (key data) is a hash.

s3.list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) #=>
  [{:key                => "test1",
    :last_modified      => "2007-05-18T07:00:59.000Z",
    :owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
    :owner_display_name => "root",
    :e_tag              => "000000000059075b964b07152d234b70",
    :storage_class      => "STANDARD",
    :size               => 3,
    :service=> {'is_truncated' => false,
                'prefix'       => "t",
                'marker'       => "",
                'name'         => "my_awesome_bucket",
                'max-keys'     => "5"}, ..., {...}]


287
288
289
290
291
292
293
# File 'lib/s3/right_s3_interface.rb', line 287

# Lists a bucket's keys in a single request. Accepts the standard S3
# listing options ('prefix', 'marker', 'max-keys', 'delimiter').
def list_bucket(bucket, options={}, headers={})
    unless options.blank?
        query = options.map { |name, value| "#{name.to_s}=#{CGI::escape value.to_s}" }.join('&')
        bucket += "?#{query}"
    end
    request_info(generate_rest_request('GET', headers.merge(:url => bucket)), S3ListBucketParser.new(:logger => @logger))
rescue
    on_exception
end

Generates link for ‘ListBucket’.

s3.list_bucket_link('my_awesome_bucket') #=> url string


877
878
879
880
881
882
# File 'lib/s3/right_s3_interface.rb', line 877

# Builds a signed query-string URL for 'ListBucket', with optional
# listing options appended to the query string.
def list_bucket_link(bucket, options=nil, expires=nil, headers={})
    unless options.blank?
        query = options.map { |name, value| "#{name.to_s}=#{CGI::escape value.to_s}" }.join('&')
        bucket += "?#{query}"
    end
    generate_link('GET', headers.merge(:url => bucket), expires)
rescue
    on_exception
end

#move(src_bucket, src_key, dest_bucket, dest_key = nil, directive = :copy, headers = {}) ⇒ Object

Move an object.

directive: :copy    - copy meta-headers from source (default value)
           :replace - replace meta-headers by passed ones

# move bucket1/key1 to bucket1/key2
s3.move('bucket1', 'key1', 'bucket1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:27:22.000Z"}

# move bucket1/key1 to bucket2/key2 with new meta-headers assignment
s3.copy('bucket1', 'key1', 'bucket2', 'key2', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:28:22.000Z"}


658
659
660
661
662
663
# File 'lib/s3/right_s3_interface.rb', line 658

# Moves an object: copies it, then deletes the source unless source and
# destination are the same object. Returns the copy result hash.
def move(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
    result = copy(src_bucket, src_key, dest_bucket, dest_key, directive, headers)
    same_object = (src_bucket == dest_bucket) && (src_key == dest_key)
    delete(src_bucket, src_key) unless same_object
    result
end

#put(bucket, key, data = nil, headers = {}) ⇒ Object

Saves object to Amazon. Returns true or an exception. Any header starting with AMAZON_METADATA_PREFIX is considered user metadata. It will be stored with the object and returned when you retrieve the object. The total size of the HTTP request, not including the body, must be less than 4 KB.

s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!', 'x-amz-meta-family'=>'Woho556!') #=> true

This method is capable of ‘streaming’ uploads; that is, it can upload data from a file or other IO object without first reading all the data into memory. This is most useful for large PUTs - it is difficult to read a 2 GB file entirely into memory before sending it to S3. To stream an upload, pass an object that responds to ‘read’ (like the read method of IO) and to either ‘lstat’ or ‘size’. For files, this means streaming is enabled by simply making the call:

s3.put(bucket_name, 'S3keyname.forthisfile',  File.open('localfilename.dat'))

If the IO object you wish to stream from responds to the read method but doesn’t implement lstat or size, you can extend the object dynamically to implement these methods, or define your own class which defines these methods. Be sure that your class returns ‘nil’ from read() after having read ‘size’ bytes. Otherwise S3 will drop the socket after ‘Content-Length’ bytes have been uploaded, and HttpConnection will interpret this as an error.

This method now supports very large PUTs, where very large is > 2 GB.

For Win32 users: Files and IO objects should be opened in binary mode. If a text mode IO object is passed to PUT, it will be converted to binary mode.



397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
# File 'lib/s3/right_s3_interface.rb', line 397

# Saves an object to S3 under bucket/key. Returns true on success or
# raises via on_exception. Streams from IO-like objects that respond to
# :read plus :lstat or :size (see class docs for streaming details).
def put(bucket, key, data=nil, headers={})
    # Win32: an IO opened in text mode must be switched to binary for
    # streaming to work properly.
    data.binmode if data.respond_to?(:binmode)
    data_size =
        if data.respond_to?(:lstat)
            data.lstat.size
        elsif data.respond_to?(:size)
            data.size
        else
            0
        end
    # Large uploads ask S3 to acknowledge headers before the body is sent.
    headers['expect'] = '100-continue' if data_size >= USE_100_CONTINUE_PUT_SIZE
    request = generate_rest_request('PUT',
                                    headers.merge(:url=>"#{bucket}/#{CGI::escape key}",
                                                  :data=>data,
                                                  'Content-Length' => data_size.to_s))
    request_info(request, RightHttp2xxParser.new)
rescue
    on_exception
end

#put_acl(bucket, key, acl_xml_doc, headers = {}) ⇒ Object

Sets the ACL on a bucket or object.



743
744
745
746
747
748
749
# File 'lib/s3/right_s3_interface.rb', line 743

# Applies the given ACL XML document to a bucket (empty key) or to a
# single object within it. Returns the parsed response body.
def put_acl(bucket, key, acl_xml_doc, headers={})
    key_path =
        if key.blank?
            ''
        else
            "/#{CGI::escape key}"
        end
    request = generate_rest_request('PUT',
                                    headers.merge(:url=>"#{bucket}#{key_path}?acl",
                                                  :data=>acl_xml_doc))
    request_info(request, S3HttpResponseBodyParser.new)
rescue
    on_exception
end

Generates link for ‘PutACL’.

s3.put_acl_link('my_awesome_bucket',key) #=> url string


947
948
949
950
951
# File 'lib/s3/right_s3_interface.rb', line 947

# Builds a signed query-string URL for a 'PUT ?acl' request on a bucket
# (empty key) or on a single object.
def put_acl_link(bucket, key='', headers={})
    generate_link('PUT', headers.merge(:url=>"#{bucket}/#{AwsUtils::URLencode key}?acl"))
rescue
    on_exception
end

#put_bucket_acl(bucket, acl_xml_doc, headers = {}) ⇒ Object

Sets the ACL on a bucket only.



759
760
761
762
763
# File 'lib/s3/right_s3_interface.rb', line 759

# Convenience wrapper: applies an ACL XML document at the bucket level by
# delegating to put_acl with an empty key.
def put_bucket_acl(bucket, acl_xml_doc, headers={})
    put_acl(bucket, '', acl_xml_doc, headers)
rescue
    on_exception
end

Generates link for ‘PutBucketACL’.

s3.put_bucket_acl_link('my_awesome_bucket') #=> url string


967
968
969
970
971
# File 'lib/s3/right_s3_interface.rb', line 967

# Generates a signed link for 'PutBucketACL'.
#
# NOTE: a signed link carries no request body, so acl_xml_doc is not used
# when generating the URL; it is kept in the signature (now defaulted) for
# backward compatibility with existing callers.
#
#  s3.put_bucket_acl_link('my_awesome_bucket') #=> url string
def put_bucket_acl_link(bucket, acl_xml_doc=nil, headers={})
    # Bug fix: put_acl_link accepts (bucket, key='', headers={}); the old
    # code passed acl_xml_doc as a fourth argument, raising ArgumentError
    # (silently swallowed by on_exception) on every call.
    put_acl_link(bucket, '', headers)
rescue
    on_exception
end

Generates link for ‘PutObject’.

s3.put_link('my_awesome_bucket',key, object) #=> url string


888
889
890
891
892
# File 'lib/s3/right_s3_interface.rb', line 888

# Generates a signed link for 'PutObject'.
#
#  s3.put_link('my_awesome_bucket', key, object) #=> url string
def put_link(bucket, key, data=nil, expires=nil, headers={})
    url = "#{bucket}/#{AwsUtils::URLencode key}"
    generate_link('PUT', headers.merge(:url=>url, :data=>data), expires)
rescue
    on_exception
end

#put_logging(params) ⇒ Object

Sets logging configuration for a bucket from the XML configuration document.

params:
 :bucket
 :xmldoc


248
249
250
251
252
253
254
255
256
# File 'lib/s3/right_s3_interface.rb', line 248

# Uploads a logging configuration XML document for a bucket.
#
# params:
#  :bucket  - target bucket name (required)
#  :xmldoc  - logging configuration document (required)
#  :headers - extra HTTP headers (optional)
def put_logging(params)
    AwsUtils.mandatory_arguments([:bucket, :xmldoc], params)
    AwsUtils.allow_only([:bucket, :xmldoc, :headers], params)
    params[:headers] ||= {}
    request = generate_rest_request('PUT',
                                    params[:headers].merge(:url=>"#{params[:bucket]}?logging",
                                                           :data => params[:xmldoc]))
    request_info(request, S3TrueParser.new)
rescue
    on_exception
end

#rename(src_bucket, src_key, dest_key, headers = {}) ⇒ Object

Rename an object.

# rename bucket1/key1 to bucket1/key2
s3.rename('bucket1', 'key1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:29:22.000Z"}


670
671
672
# File 'lib/s3/right_s3_interface.rb', line 670

# Renames an object within a single bucket by delegating to #move with
# the :copy directive (per #move, the source key is deleted after the
# copy unless it is identical to the destination).
#
#  s3.rename('bucket1', 'key1', 'key2') #=> {:e_tag=>..., :last_modified=>...}
def rename(src_bucket, src_key, dest_key, headers={})
    move(src_bucket, src_key, src_bucket, dest_key, :copy, headers)
end

#request_info(request, parser, &block) ⇒ Object

Sends request to Amazon and parses the response. Raises AwsError if any error occurs.



172
173
174
175
# File 'lib/s3/right_s3_interface.rb', line 172

# Sends a prepared request hash to Amazon and feeds the response through
# the given parser, delegating to the shared AwsBaseInterface helper with
# this interface's params, the :s3_connection pool key, logger and
# benchmarking block. An optional block receives streamed response data.
def request_info(request, parser, &block) # :nodoc:
    request_info2(request, parser, @params, :s3_connection, @logger, @@bench, &block)

end

#retrieve_object(params, &block) ⇒ Object

New experimental API for retrieving objects, introduced in Aws 1.8.1. retrieve_object is similar in function to the older function get. It allows for optional verification of object md5 checksums on retrieval. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.

If the optional :md5 argument is provided, retrieve_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.

The optional argument of :headers allows the caller to specify arbitrary request header values. Mandatory arguments:

:bucket - the bucket in which the object is stored
:key    - the object address (or path) within the bucket

Optional arguments:

 :headers - hash of additional HTTP headers to include with the request
 :md5     - MD5 checksum against which to verify the retrieved object

s3.retrieve_object(:bucket => "foobucket", :key => "foo")
  => {:verified_md5=>false,
      :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
                 "x-amz-id-2"=>"2Aj3TDz6HP5109qly//18uHZ2a1TNHGLns9hyAtq2ved7wmzEXDOPGRHOYEa3Qnp",
                 "content-type"=>"",
                 "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                 "date"=>"Tue, 30 Sep 2008 00:52:44 GMT",
                 "x-amz-request-id"=>"EE4855DE27A2688C",
                 "server"=>"AmazonS3",
                 "content-length"=>"10"},
      :object=>"polemonium"}

s3.retrieve_object(:bucket => "foobucket", :key => "foo", :md5=>'a507841b1bc8115094b00bbe8c1b2954')
  => {:verified_md5=>true,
      :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
                 "x-amz-id-2"=>"mLWQcI+VuKVIdpTaPXEo84g0cz+vzmRLbj79TS8eFPfw19cGFOPxuLy4uGYVCvdH",
                 "content-type"=>"", "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                 "date"=>"Tue, 30 Sep 2008 00:53:08 GMT",
                 "x-amz-request-id"=>"6E7F317356580599",
                 "server"=>"AmazonS3",
                 "content-length"=>"10"},
      :object=>"polemonium"}

If a block is provided, yields incrementally to the block as the response is read. For large responses, this function is ideal as the response can be ‘streamed’. The hash containing header fields is still returned.



569
570
571
572
573
574
575
576
577
578
579
580
581
582
# File 'lib/s3/right_s3_interface.rb', line 569

# Fetches an object, optionally verifying its MD5 checksum.
#
# Mandatory params: :bucket, :key. Optional: :headers, :md5.
# Returns a hash with :object, :headers and :verified_md5 — true only
# when a :md5 param was supplied and matches the returned ETag. Yields
# response chunks to the block, if given, for streamed downloads.
def retrieve_object(params, &block)
    AwsUtils.mandatory_arguments([:bucket, :key], params)
    AwsUtils.allow_only([:bucket, :key, :headers, :md5], params)
    params[:headers] ||= {}
    url = "#{params[:bucket]}/#{CGI::escape params[:key]}"
    request = generate_rest_request('GET', params[:headers].merge(:url=>url))
    response = request_info(request, S3HttpResponseBodyParser.new, &block)
    response[:verified_md5] =
        if params[:md5]
            response[:headers]['etag'].gsub(/\"/, '') == params[:md5]
        else
            false
        end
    response
rescue
    on_exception
end

#retrieve_object_and_verify(params, &block) ⇒ Object

Identical in function to retrieve_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the ‘md5’ argument. If the check passes, returns the response metadata with the “verified_md5” field set true. Raises an exception if the checksums conflict. This call is implemented as a wrapper around retrieve_object and the user may gain different semantics by creating a custom wrapper.

Raises:



587
588
589
590
591
592
# File 'lib/s3/right_s3_interface.rb', line 587

# Same as retrieve_object but insists on verification: the :md5 param is
# mandatory and an AwsError is raised when the retrieved object's
# checksum does not match it. On success returns the response metadata
# with :verified_md5 set true.
def retrieve_object_and_verify(params, &block)
    AwsUtils.mandatory_arguments([:md5], params)
    result = retrieve_object(params, &block)
    unless result[:verified_md5]
        raise AwsError.new("Retrieved object failed MD5 checksum verification: #{result.inspect}")
    end
    result
end

#store_object(params) ⇒ Object

New experimental API for uploading objects, introduced in Aws 1.8.1. store_object is similar in function to the older function put, but returns the full response metadata. It also allows for optional verification of object md5 checksums on upload. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments. The hash of the response headers contains useful information like the Amazon request ID and the object ETag (MD5 checksum).

If the optional :md5 argument is provided, store_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.

The optional argument of :headers allows the caller to specify arbitrary request header values.

s3.store_object(:bucket => “foobucket”, :key => “foo”, :md5 => “a507841b1bc8115094b00bbe8c1b2954”, :data => “polemonium” )

=> {"x-amz-id-2"=>"SVsnS2nfDaR+ixyJUlRKM8GndRyEMS16+oZRieamuL61pPxPaTuWrWtlYaEhYrI/",
    "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
    "date"=>"Mon, 29 Sep 2008 18:57:46 GMT",
    :verified_md5=>true,
    "x-amz-request-id"=>"63916465939995BA",
    "server"=>"AmazonS3",
    "content-length"=>"0"}

s3.store_object(:bucket => “foobucket”, :key => “foo”, :data => “polemonium” )

=> {"x-amz-id-2"=>"MAt9PLjgLX9UYJ5tV2fI/5dBZdpFjlzRVpWgBDpvZpl+V+gJFcBMW2L+LBstYpbR",
    "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
    "date"=>"Mon, 29 Sep 2008 18:58:56 GMT",
    :verified_md5=>false,
    "x-amz-request-id"=>"3B25A996BC2CDD3B",
    "server"=>"AmazonS3",
    "content-length"=>"0"}


444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
# File 'lib/s3/right_s3_interface.rb', line 444

# Uploads an object and returns the full response header hash, with
# :verified_md5 set true only when a :md5 param was supplied and matches
# the returned ETag.
#
# Mandatory params: :bucket, :key, :data. Optional: :headers, :md5.
def store_object(params)
    AwsUtils.allow_only([:bucket, :key, :data, :headers, :md5], params)
    AwsUtils.mandatory_arguments([:bucket, :key, :data], params)
    params[:headers] ||= {}

    data = params[:data]
    # Win32: an IO opened in text mode must be reset to binary for
    # streaming to work properly.
    data.binmode if data.respond_to?(:binmode)
    big_upload = (data.respond_to?(:lstat) && data.lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
                 (data.respond_to?(:size) && data.size >= USE_100_CONTINUE_PUT_SIZE)
    # Large uploads ask S3 to acknowledge headers before the body is sent.
    params[:headers]['expect'] = '100-continue' if big_upload

    request = generate_rest_request('PUT',
                                    params[:headers].merge(:url=>"#{params[:bucket]}/#{CGI::escape params[:key]}",
                                                           :data=>data))
    response = request_info(request, S3HttpResponseHeadParser.new)
    response[:verified_md5] =
        if params[:md5]
            response['etag'].gsub(/\"/, '') == params[:md5]
        else
            false
        end
    response
rescue
    on_exception
end

#store_object_and_verify(params) ⇒ Object

Identical in function to store_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the ‘md5’ argument. If the check passes, returns the response metadata with the “verified_md5” field set true. Raises an exception if the checksums conflict. This call is implemented as a wrapper around store_object and the user may gain different semantics by creating a custom wrapper.

s3.store_object_and_verify(:bucket => “foobucket”, :key => “foo”, :md5 => “a507841b1bc8115094b00bbe8c1b2954”, :data => “polemonium” )

=> {"x-amz-id-2"=>"IZN3XsH4FlBU0+XYkFTfHwaiF1tNzrm6dIW2EM/cthKvl71nldfVC0oVQyydzWpb",
    "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
    "date"=>"Mon, 29 Sep 2008 18:38:32 GMT",
    :verified_md5=>true,
    "x-amz-request-id"=>"E8D7EA4FE00F5DF7",
    "server"=>"AmazonS3",
    "content-length"=>"0"}

s3.store_object_and_verify(:bucket => “foobucket”, :key => “foo”, :md5 => “a507841b1bc8115094b00bbe8c1b2953”, :data => “polemonium” )

Aws::AwsError: Uploaded object failed MD5 checksum verification: {"x-amz-id-2"=>"HTxVtd2bf7UHHDn+WzEH43MkEjFZ26xuYvUzbstkV6nrWvECRWQWFSx91z/bl03n",
                                                                       "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                                                                       "date"=>"Mon, 29 Sep 2008 18:38:41 GMT",
                                                                       :verified_md5=>false,
                                                                       "x-amz-request-id"=>"0D7ADE09F42606F2",
                                                                       "server"=>"AmazonS3",
                                                                       "content-length"=>"0"}


488
489
490
491
492
# File 'lib/s3/right_s3_interface.rb', line 488

# Same as store_object but requires the :md5 param and raises an AwsError
# when the uploaded object's ETag does not match it. On success returns
# the response metadata with :verified_md5 set true.
def store_object_and_verify(params)
    AwsUtils.mandatory_arguments([:md5], params)
    resp = store_object(params)
    return resp if resp[:verified_md5]
    raise AwsError.new("Uploaded object failed MD5 checksum verification: #{resp.inspect}")
end