Class: DPL::Provider::S3

Inherits:
DPL::Provider
Defined in:
lib/dpl/provider/s3.rb

Constant Summary

DEFAULT_MAX_THREADS = 5
MAX_THREADS = 15

Instance Method Summary

Instance Method Details

#access_key_id ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 34

def access_key_id
  options[:access_key_id] || context.env['AWS_ACCESS_KEY_ID'] || raise(Error, "missing access_key_id")
end
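
The access key is resolved from the access_key_id option first, then from the AWS_ACCESS_KEY_ID environment variable, and a missing key raises an error. A standalone sketch of that fallback (the helper name and key value are made up for illustration):

# Illustrative only; not part of the provider.
def lookup_access_key_id(options, env)
  options[:access_key_id] || env['AWS_ACCESS_KEY_ID'] || raise(ArgumentError, "missing access_key_id")
end

lookup_access_key_id({ access_key_id: 'AKIAIOSFODNN7EXAMPLE' }, {})          # => "AKIAIOSFODNN7EXAMPLE"
lookup_access_key_id({}, { 'AWS_ACCESS_KEY_ID' => 'AKIAIOSFODNN7EXAMPLE' })  # => "AKIAIOSFODNN7EXAMPLE"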

#api ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 14

def api
  @api ||= ::Aws::S3::Resource.new(s3_options)
end

#check_app ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 27

def check_app
end

#check_auth ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 60

def check_auth
  log "Logging in with Access Key: #{access_key_id[-4..-1].rjust(20, '*')}"
end
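
Only the last four characters of the key appear in the log; the rest is padded with asterisks. For example, using AWS's documented sample key:

'AKIAIOSFODNN7EXAMPLE'[-4..-1].rjust(20, '*')  # => "****************MPLE"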

#deploy ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 123

def deploy
  super
rescue ::Aws::S3::Errors::InvalidAccessKeyId
  raise Error, "Invalid S3 Access Key Id, Stopping Deploy"
rescue ::Aws::S3::Errors::ChecksumError
  raise Error, "Aws Secret Key does not match Access Key Id, Stopping Deploy"
rescue ::Aws::S3::Errors::AccessDenied
  raise Error, "Oops, It looks like you tried to write to a bucket that isn't yours or doesn't exist yet. Please create the bucket before trying to write to it."
end

#max_threads ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 18

def max_threads
  return @max_threads if @max_threads
  if (@max_threads = threads_wanted = options.fetch(:max_threads, DEFAULT_MAX_THREADS).to_i) > MAX_THREADS
    log "Desired thread count #{threads_wanted} is too large. Using #{MAX_THREADS}."
    @max_threads = MAX_THREADS
  end
  @max_threads
end
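
In short: a missing max_threads option yields the DEFAULT_MAX_THREADS of 5, and any request above MAX_THREADS is capped at 15 after a log message. A standalone sketch of the clamping (the helper name is made up):

# Illustrative only; mirrors the clamping logic above.
def clamp_threads(options)
  wanted = options.fetch(:max_threads, 5).to_i  # 5 == DEFAULT_MAX_THREADS
  wanted > 15 ? 15 : wanted                     # 15 == MAX_THREADS
end

clamp_threads({})                   # => 5  (default)
clamp_threads({ max_threads: 8 })   # => 8
clamp_threads({ max_threads: 50 })  # => 15 (capped)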

#needs_key? ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/dpl/provider/s3.rb', line 30

def needs_key?
  false
end

#push_app ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 68

def push_app
  old_pwd = Dir.pwd
  cwd = options.fetch(:local_dir, Dir.pwd)
  Dir.chdir(cwd)
  glob_args = ["**/*"]
  glob_args << File::FNM_DOTMATCH if options[:dot_match]
  files = Dir.glob(*glob_args).reject {|f| File.directory?(f)}
  upload_multithreaded(files)

  if suffix = options[:index_document_suffix]
    api.bucket(option(:bucket)).website.put(
      website_configuration: {
        index_document: {
          suffix: suffix
        }
      }
    )
  end

  Dir.chdir(old_pwd)
end
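
The method uploads everything under local_dir (or the current directory), optionally including dotfiles, and can also set the bucket's website index document. A hypothetical options hash exercising those branches (all values are placeholders; only the keys come from the code above):

options = {
  bucket:                'my-site-bucket',  # used via option(:bucket)
  local_dir:             '_site',           # upload from this directory instead of Dir.pwd
  dot_match:             true,              # adds File::FNM_DOTMATCH so dotfiles are globbed
  index_document_suffix: 'index.html'       # also puts a website configuration on the bucket
}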

#s3_options ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 42

def s3_options
  defaults = {
    region:      options[:region] || 'us-east-1',
    credentials: ::Aws::Credentials.new(access_key_id, secret_access_key)
  }

  if options[:endpoint]
    uri = URI.parse(options[:endpoint])
    unless uri.scheme
      log "S3 endpoint does not specify scheme; defaulting to HTTPS"
      uri = URI("https://#{options[:endpoint]}")
    end
    defaults[:endpoint] = uri.to_s
  end

  defaults
end
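
The region defaults to us-east-1, and a custom endpoint given without a scheme is wrapped in https:// before being handed to the SDK. A standalone sketch of that scheme fallback (the endpoint host is a placeholder):

require 'uri'

endpoint = 'storage.example.com'  # hypothetical endpoint with no scheme
uri = URI.parse(endpoint)
uri = URI("https://#{endpoint}") unless uri.scheme
uri.to_s  # => "https://storage.example.com"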

#secret_access_key ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 38

def secret_access_key
  options[:secret_access_key] || context.env['AWS_SECRET_ACCESS_KEY'] || raise(Error, "missing secret_access_key")
end

#upload_multithreaded(files) ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 90

def upload_multithreaded(files)
  file_number = 0
  mutex = Mutex.new
  threads = []
  log "Beginning upload of #{files.length} files with #{max_threads} threads."

  max_threads.times do |i|
    threads[i] = Thread.new {
      until files.empty?
        mutex.synchronize do
          file_number += 1
          Thread.current["file_number"] = file_number
        end
        filename = files.pop rescue nil
        next unless filename

        opts  = content_data_for(filename)
        opts[:cache_control]          = get_option_value_by_filename(options[:cache_control], filename) if options[:cache_control]
        opts[:acl]                    = options[:acl].gsub(/_/, '-') if options[:acl]
        opts[:expires]                = get_option_value_by_filename(options[:expires], filename) if options[:expires]
        opts[:storage_class]          = options[:storage_class] if options[:storage_class]
        opts[:server_side_encryption] = "AES256" if options[:server_side_encryption]
        unless File.directory?(filename)
          log "uploading #{filename.inspect} with #{opts.inspect}"
          result = api.bucket(option(:bucket)).object(upload_path(filename)).upload_file(filename, opts)
          warn "error while uploading #{filename.inspect}" unless result
        end
      end
    }
  end
  threads.each { |t| t.join }
end
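
Each worker repeatedly takes a file from the shared array and uploads it until the array is empty, so at most max_threads uploads run concurrently. A simplified, self-contained sketch of that worker-pool pattern (popping inside the mutex here for clarity, whereas the method above pops outside it):

work    = ('a'..'j').to_a  # stand-in for the files array
mutex   = Mutex.new
threads = Array.new(4) do
  Thread.new do
    loop do
      item = mutex.synchronize { work.pop }
      break unless item
      # process the item; the provider calls upload_file here
    end
  end
end
threads.each(&:join)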

#upload_path(filename) ⇒ Object



# File 'lib/dpl/provider/s3.rb', line 64

def upload_path(filename)
  [options[:upload_dir], filename].compact.join("/")
end
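
With no upload_dir the object key is simply the file's path relative to the working directory; with one, it becomes a key prefix. A standalone sketch (this helper takes the directory as an argument instead of reading options):

def upload_path(filename, upload_dir = nil)
  [upload_dir, filename].compact.join("/")
end

upload_path('css/site.css')                 # => "css/site.css"
upload_path('css/site.css', 'releases/v2')  # => "releases/v2/css/site.css"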