Class: FakeS3::FileStore

Inherits:
Object
Defined in:
lib/fakes3/file_store.rb

Constant Summary

FAKE_S3_METADATA_DIR =
".fakes3_metadataFFF"

SUBSECOND_PRECISION =
3

Some S3 clients with overly strict date parsing fail to parse ISO 8601 dates that lack sub-second precision (e.g. jets3t v0.7.2), and the examples in the official AWS S3 documentation use three (3) decimal places of sub-second precision.
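
As a quick illustration (not part of the library), the constant feeds Time#iso8601, which appends exactly three sub-second decimals:

require 'time'

# Format a UTC timestamp the way FileStore formats modified_date:
# ISO 8601 with three decimal places of sub-second precision.
puts Time.now.utc.iso8601(3)
# => e.g. "2015-06-01T12:34:56.789Z"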

Instance Method Summary

Constructor Details

#initialize(root, quiet_mode) ⇒ FileStore

Returns a new instance of FileStore.



# File 'lib/fakes3/file_store.rb', line 19

def initialize(root, quiet_mode)
  @root = root
  @buckets = []
  @bucket_hash = {}
  @quiet_mode = quiet_mode
  Dir[File.join(root,"*")].each do |bucket|
    bucket_name = File.basename(bucket)
    bucket_obj = Bucket.new(bucket_name,Time.now,[])
    @buckets << bucket_obj
    @bucket_hash[bucket_name] = bucket_obj
  end
end
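
A minimal usage sketch, assuming a fakes3 checkout on the load path and a hypothetical root directory; every directory already under the root is registered as a bucket at startup:

require 'fakes3/file_store'

store = FakeS3::FileStore.new('/tmp/fakes3_root', true) # quiet_mode = true
store.buckets.each { |bucket| puts bucket.name }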

Instance Method Details

#buckets ⇒ Object



# File 'lib/fakes3/file_store.rb', line 51

def buckets
  @buckets
end

#combine_object_parts(bucket, upload_id, object_name, parts, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 229

def combine_object_parts(bucket, upload_id, object_name, parts, request)
  upload_path   = File.join(@root, bucket.name)
  base_path     = File.join(upload_path, "#{upload_id}_#{object_name}")

  complete_file = ""
  chunk         = ""
  part_paths    = []

  parts.sort_by { |part| part[:number] }.each do |part|
    part_path    = "#{base_path}_part#{part[:number]}"
    content_path = File.join(part_path, FAKE_S3_METADATA_DIR, 'content')

    File.open(content_path, 'rb') { |f| chunk = f.read }
    etag = Digest::MD5.hexdigest(chunk)

    raise StandardError.new("invalid file chunk") unless part[:etag] == etag
    complete_file << chunk
    part_paths    << part_path
  end

  object = do_store_object(bucket, object_name, complete_file, request)

  # clean up parts
  part_paths.each do |path|
    FileUtils.remove_dir(path)
  end

  object
end
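
The part check above is plain MD5: the ETag supplied for each part must equal the MD5 hexdigest of that part's bytes. A client-side sketch with a hypothetical part file:

require 'digest/md5'

# Compute the ETag FakeS3 expects for one uploaded part.
part_bytes = File.binread('/tmp/upload_part1') # hypothetical path
etag = Digest::MD5.hexdigest(part_bytes)
puts etag # send this back in the CompleteMultipartUpload request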

#copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 113

def copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request)
  src_root = File.join(@root,src_bucket_name,src_name,FAKE_S3_METADATA_DIR)
  src_metadata_filename = File.join(src_root, "metadata")
  src_metadata = YAML.load(File.open(src_metadata_filename, 'rb').read)
  src_content_filename = File.join(src_root, "content")

  dst_filename = File.join(@root,dst_bucket_name,dst_name)
  FileUtils.mkdir_p(dst_filename)

  metadata_dir = File.join(dst_filename,FAKE_S3_METADATA_DIR)
  FileUtils.mkdir_p(metadata_dir)

  content = File.join(metadata_dir, "content")
  metadata = File.join(metadata_dir, "metadata")

  if src_bucket_name != dst_bucket_name || src_name != dst_name
    File.open(content, 'wb') do |f|
      File.open(src_content_filename, 'rb') do |input|
        f << input.read
      end
    end

    File.open(metadata,'w') do |f|
      File.open(src_metadata_filename,'r') do |input|
        f << input.read
      end
    end
  end

  metadata_directive = request.header["x-amz-metadata-directive"].first
  if metadata_directive == "REPLACE"
    metadata_struct = create_metadata(content, request)
    File.open(metadata,'w') do |f|
      f << YAML::dump(metadata_struct)
    end
  end

  src_bucket = get_bucket(src_bucket_name) || create_bucket(src_bucket_name)
  dst_bucket = get_bucket(dst_bucket_name) || create_bucket(dst_bucket_name)

  obj = S3Object.new
  obj.name = dst_name
  obj.md5 = src_metadata[:md5]
  obj.content_type = src_metadata[:content_type]
  obj.content_disposition = src_metadata[:content_disposition]
  obj.content_encoding = src_metadata[:content_encoding] # if src_metadata[:content_encoding]
  obj.size = src_metadata[:size]
  obj.modified_date = src_metadata[:modified_date]

  src_bucket.find(src_name)
  dst_bucket.add(obj)
  return obj
end
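
Behavior pivots on the x-amz-metadata-directive header: anything other than REPLACE keeps the source object's metadata, while REPLACE rebuilds it from the copy request's own headers. A hedged sketch of such a request with Net::HTTP, assuming a FakeS3 server on a hypothetical localhost:10001 and hypothetical bucket/key names:

require 'net/http'

uri = URI('http://localhost:10001/dst-bucket/dst-key')
req = Net::HTTP::Put.new(uri)
req['x-amz-copy-source']        = '/src-bucket/src-key'
req['x-amz-metadata-directive'] = 'REPLACE' # rebuild metadata from this request
req['content-type']             = 'text/plain'
res = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(req) }
puts res.code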

#create_bucket(bucket) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 63

def create_bucket(bucket)
  FileUtils.mkdir_p(File.join(@root, bucket))
  bucket_obj = Bucket.new(bucket, Time.now, [])
  if !@bucket_hash[bucket]
    @buckets << bucket_obj
    @bucket_hash[bucket] = bucket_obj
  end
  bucket_obj
end

#create_metadata(content, request) ⇒ Object

TODO: abstract getting metadata from request.



# File 'lib/fakes3/file_store.rb', line 290

def create_metadata(content, request)
  metadata = {}
  metadata[:md5] = Digest::MD5.file(content).hexdigest
  metadata[:content_type] = request.header["content-type"].first
  if request.header['content-disposition']
    metadata[:content_disposition] = request.header['content-disposition'].first
  end
  content_encoding = request.header["content-encoding"].first
  metadata[:content_encoding] = content_encoding
  #if content_encoding
  #  metadata[:content_encoding] = content_encoding
  #end
  metadata[:size] = File.size(content)
  metadata[:modified_date] = File.mtime(content).utc.iso8601(SUBSECOND_PRECISION)
  metadata[:amazon_metadata] = {}
  metadata[:custom_metadata] = {}

  # Add custom metadata from the request header
  request.header.each do |key, value|
    match = /^x-amz-([^-]+)-(.*)$/.match(key)
    next unless match
    if match[1].eql?('meta') && (match_key = match[2])
      metadata[:custom_metadata][match_key] = value.join(', ')
      next
    end
    metadata[:amazon_metadata][key.gsub(/^x-amz-/, '')] = value.join(', ')
  end
  return metadata
end
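
The final loop sorts headers with a single regex: x-amz-meta-* pairs go into :custom_metadata, all other x-amz-* pairs into :amazon_metadata. A standalone sketch with hypothetical header values:

headers = {
  'x-amz-meta-color'    => ['red'],
  'x-amz-storage-class' => ['STANDARD'],
  'content-type'        => ['text/plain']   # ignored: no x-amz- prefix
}

custom, amazon = {}, {}
headers.each do |key, value|
  match = /^x-amz-([^-]+)-(.*)$/.match(key)
  next unless match
  if match[1] == 'meta'
    custom[match[2]] = value.join(', ')
  else
    amazon[key.sub(/^x-amz-/, '')] = value.join(', ')
  end
end
# custom => {"color"=>"red"}
# amazon => {"storage-class"=>"STANDARD"}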

#delete_bucket(bucket_name) ⇒ Object

Raises:

(NoSuchBucket) if the named bucket does not exist

(BucketNotEmpty) if the bucket still contains objects


# File 'lib/fakes3/file_store.rb', line 73

def delete_bucket(bucket_name)
  bucket = get_bucket(bucket_name)
  raise NoSuchBucket if !bucket
  raise BucketNotEmpty if bucket.objects.count > 0
  FileUtils.rm_r(get_bucket_folder(bucket))
  @bucket_hash.delete(bucket_name)
end

#delete_object(bucket, object_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 259

def delete_object(bucket,object_name,request)
  begin
    filename = File.join(@root,bucket.name,object_name)
    FileUtils.rm_rf(filename)
    object = bucket.find(object_name)
    bucket.remove(object)
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end

#delete_objects(bucket, objects, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 272

def delete_objects(bucket, objects, request)
  begin
    filenames = []
    objects.each do |object_name|
      filenames << File.join(@root,bucket.name,object_name)
      object = bucket.find(object_name)
      bucket.remove(object)
    end

    FileUtils.rm_rf(filenames)
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end

#do_store_object(bucket, object_name, filedata, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 191

def do_store_object(bucket, object_name, filedata, request)
  begin
    filename = File.join(@root, bucket.name, object_name)
    FileUtils.mkdir_p(filename)

    metadata_dir = File.join(filename, FAKE_S3_METADATA_DIR)
    FileUtils.mkdir_p(metadata_dir)

    content = File.join(filename, FAKE_S3_METADATA_DIR, "content")
    metadata = File.join(filename, FAKE_S3_METADATA_DIR, "metadata")

    File.open(content,'wb') { |f| f << filedata }

    metadata_struct = create_metadata(content, request)
    File.open(metadata,'w') do |f|
      f << YAML::dump(metadata_struct)
    end

    obj = S3Object.new
    obj.name = object_name
    obj.md5 = metadata_struct[:md5]
    obj.content_type = metadata_struct[:content_type]
    obj.content_disposition = metadata_struct[:content_disposition]
    obj.content_encoding = metadata_struct[:content_encoding] # if metadata_struct[:content_encoding]
    obj.size = metadata_struct[:size]
    obj.modified_date = metadata_struct[:modified_date]

    bucket.add(obj)
    return obj
  rescue
    unless @quiet_mode
      puts $!
      $!.backtrace.each { |line| puts line }
    end
    return nil
  end
end
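
For orientation, the on-disk layout this produces, with hypothetical root, bucket, and object names; both files sit inside the FAKE_S3_METADATA_DIR subdirectory:

# /tmp/fakes3_root/my-bucket/photo.jpg/.fakes3_metadataFFF/content
#   raw object bytes
# /tmp/fakes3_root/my-bucket/photo.jpg/.fakes3_metadataFFF/metadata
#   YAML dump of the hash built by create_metadata (:md5, :size, ...)
content_path = File.join('/tmp/fakes3_root', 'my-bucket', 'photo.jpg',
                         '.fakes3_metadataFFF', 'content')
puts File.binread(content_path) if File.exist?(content_path)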

#get_bucket(bucket) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 59

def get_bucket(bucket)
  @bucket_hash[bucket]
end

#get_bucket_folder(bucket) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 55

def get_bucket_folder(bucket)
  File.join(@root, bucket.name)
end

#get_object(bucket, object_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 81

def get_object(bucket, object_name, request)
  begin
    real_obj = S3Object.new
    obj_root = File.join(@root,bucket,object_name,FAKE_S3_METADATA_DIR)
    metadata = File.open(File.join(obj_root, "metadata")) { |file| YAML::load(file) }
    real_obj.name = object_name
    real_obj.md5 = metadata[:md5]
    real_obj.content_type = request.query['response-content-type'] ||
      metadata.fetch(:content_type) { "application/octet-stream" }
    real_obj.content_disposition = request.query['response-content-disposition'] ||
      metadata[:content_disposition]
    real_obj.content_encoding = metadata.fetch(:content_encoding) # if metadata.fetch(:content_encoding)
    real_obj.io = RateLimitableFile.open(File.join(obj_root, "content"), 'rb')
    real_obj.size = metadata.fetch(:size) { 0 }
    real_obj.creation_date = File.ctime(obj_root).utc.iso8601(SUBSECOND_PRECISION)
    real_obj.modified_date = metadata.fetch(:modified_date) do
      File.mtime(File.join(obj_root, "content")).utc.iso8601(SUBSECOND_PRECISION)
    end
    real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
    return real_obj
  rescue
    unless @quiet_mode
      puts $!
      $!.backtrace.each { |line| puts line }
    end
    return nil
  end
end
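
The two request.query lookups mirror S3's response header overrides: a GET can force the returned Content-Type and Content-Disposition via query parameters. A sketch, assuming a FakeS3 server on a hypothetical localhost:10001:

require 'net/http'

uri = URI('http://localhost:10001/my-bucket/report.csv') # hypothetical object
uri.query = URI.encode_www_form(
  'response-content-type'        => 'text/csv',
  'response-content-disposition' => 'attachment; filename="report.csv"'
)
res = Net::HTTP.get_response(uri)
puts res['Content-Type'] # => "text/csv" rather than the stored type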

#object_metadata(bucket, object) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 110

def object_metadata(bucket, object)
end

#rate_limit=(rate_limit) ⇒ Object

Pass a rate limit in bytes per second



# File 'lib/fakes3/file_store.rb', line 33

def rate_limit=(rate_limit)
  if rate_limit.is_a?(String)
    if rate_limit =~ /^(\d+)$/
      RateLimitableFile.rate_limit = rate_limit.to_i
    elsif rate_limit =~ /^(.*)K$/
      RateLimitableFile.rate_limit = $1.to_f * 1000
    elsif rate_limit =~ /^(.*)M$/
      RateLimitableFile.rate_limit = $1.to_f * 1000000
    elsif rate_limit =~ /^(.*)G$/
      RateLimitableFile.rate_limit = $1.to_f * 1000000000
    else
      raise "Invalid Rate Limit Format: Valid values include (1000,10K,1.1M)"
    end
  else
    RateLimitableFile.rate_limit = nil
  end
end
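
The accepted formats, per the parsing above (store is assumed to be a FileStore instance; units are decimal, not binary):

store.rate_limit = "1000"  # 1,000 bytes/second
store.rate_limit = "10K"   # 10,000 bytes/second
store.rate_limit = "1.1M"  # 1,100,000 bytes/second
store.rate_limit = "2G"    # 2,000,000,000 bytes/second
store.rate_limit = nil     # any non-String clears the limit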

#store_object(bucket, object_name, request) ⇒ Object



# File 'lib/fakes3/file_store.rb', line 167

def store_object(bucket, object_name, request)
  filedata = ""

  # TODO put a tmpfile here first and mv it over at the end
  content_type = request.content_type || ""

  match = content_type.match(/^multipart\/form-data; boundary=(.+)/)
  boundary = match[1] if match
  if boundary
    boundary = WEBrick::HTTPUtils::dequote(boundary)
    form_data = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)

    if form_data['file'] == nil || form_data['file'] == ""
      raise WEBrick::HTTPStatus::BadRequest
    end

    filedata = form_data['file']
  else
    request.body { |chunk| filedata << chunk }
  end

  do_store_object(bucket, object_name, filedata, request)
end
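
The multipart branch defers to WEBrick's form parser; a standalone sketch of that step with a hypothetical boundary and body (requires the webrick gem):

require 'webrick'

boundary = 'AaB03x' # hypothetical boundary token
body = <<~BODY.gsub("\n", "\r\n")
  --#{boundary}
  Content-Disposition: form-data; name="file"; filename="hello.txt"
  Content-Type: text/plain

  hello world
  --#{boundary}--
BODY

form_data = WEBrick::HTTPUtils.parse_form_data(body, boundary)
puts form_data['file'] # => "hello world"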