Class: Datapimp::Sync::S3Bucket

Inherits: Hashie::Mash
Includes: Logging
Defined in: lib/datapimp/sync/s3_bucket.rb

Instance Method Summary

Methods included from Logging

#log, #logger, #logger=

Instance Method Details

#asset_fingerprints ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 71

def asset_fingerprints
  deploy_manifest['asset_fingerprints'] ||= {}
end

#build_deploy_manifest_from_remote ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 51

def build_deploy_manifest_from_remote
  # TODO
  # Implement
end

#cloudfront ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 21

def cloudfront
  @cloudfront ||= Datapimp::Sync::CloudfrontDistribution.new(bucket: remote)
end

#deploy_manifest ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 47

def deploy_manifest
  @deploy_manifest ||= (JSON.parse(deploy_manifest_path.read) || {} rescue {})
end

#deploy_manifest_path ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 41

def deploy_manifest_path
  Datapimp.config.deploy_manifests_path
    .tap {|p| FileUtils.mkdir_p(p) }
    .join(remote.to_s.parameterize + '.json')
end
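
For illustration, assuming Datapimp.config.deploy_manifests_path points at tmp/deploy_manifests and the bucket's remote attribute is "assets.example.com" (both hypothetical values), the remote name is parameterized into the manifest filename:

bucket = Datapimp::Sync::S3Bucket.new(remote: "assets.example.com", local: "./public")
bucket.deploy_manifest_path
# => #<Pathname:tmp/deploy_manifests/assets-example-com.json>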

#local_path ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 37

def local_path
  Pathname(local)
end

#prepare_manifest_for(entries) ⇒ Object

Builds a manifest of MD5 hashes for each file so that files which have not changed since the last deploy are not uploaded again.

# File 'lib/datapimp/sync/s3_bucket.rb', line 59

def prepare_manifest_for(entries)
  deploy_manifest
end
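
As written, this only loads the memoized manifest; the MD5 fingerprinting itself happens in #run_push_action, which compares Digest::MD5.hexdigest(entry.read) against #asset_fingerprints. A usage sketch, assuming bucket is a configured S3Bucket instance and the glob below is illustrative:

entries = Dir[bucket.local_path.join('**/*')].map { |f| Pathname(f) }
bucket.prepare_manifest_for(entries)   # ensures the deploy manifest is loaded
bucket.asset_fingerprints              # fingerprints recorded by earlier pushes, if any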

#run(action, options = {}) ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 169

def run(action, options={})
  action = action.to_sym

  if action == :push
    run_push_action(options)
  elsif action == :create
    run_create_action(options)
  elsif action == :update_acl
    run_update_acl_action(options)
  elsif action == :pull
    run_pull_action(options)
  elsif action == :reset
    run_reset_action(options)
  end
end
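
Because the class inherits from Hashie::Mash, attributes such as remote and local can be passed straight to the constructor. A usage sketch with illustrative values:

bucket = Datapimp::Sync::S3Bucket.new(remote: "assets.example.com", local: "./public")

bucket.run(:create)                     # ensure the bucket exists
bucket.run(:push, acl: "public-read")   # upload new or changed files
bucket.run(:pull, reset: true)          # mirror the bucket locally, clearing ./public first

Unrecognized actions fall through the if/elsif chain and are silently ignored.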

#run_create_action(options = {}) ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 153

def run_create_action(options={})
  directories = Datapimp::Sync.amazon.storage.directories

  make_private = !!options[:make_private]

  bucket = if existing = directories.get(remote)
    existing
  else
    directories.create(key: remote, public: !make_private)
  end

  storage.put_bucket_website(remote, :IndexDocument => 'index.html', :ErrorDocument => 'error.html')

  bucket
end
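
Note that an existing bucket with the same name is reused rather than recreated, and the website configuration (index.html / error.html) is applied either way. A sketch, with an illustrative bucket name:

bucket = Datapimp::Sync::S3Bucket.new(remote: "assets.example.com")
bucket.run(:create, make_private: true)   # creates a non-public bucket if it does not exist yet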

#run_pull_action(options = {}) ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 129

def run_pull_action(options={})
  directories = Datapimp::Sync.amazon.storage.directories
  bucket = directories.get(remote)
  options = options.to_mash

  if options.reset == true
    FileUtils.rm_rf(local_path)
    FileUtils.mkdir_p(local_path)
  end

  bucket.files.each do |file|
    local_file = local_path.join(file.key)

    if local_file.exist? && file.etag == Digest::MD5.hexdigest(local_file.read)
      log "Skipping #{ file.key }"
      next
    end

    FileUtils.mkdir_p(local_file.dirname)

    local_file.open("w+") {|fh| fh.write(file.body); log("Updated #{ file.key }"); }
  end
end
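
Pulling is incremental: a remote file is skipped when its etag matches the MD5 of the local copy, and passing reset: true clears the local directory first. A sketch with illustrative values:

bucket = Datapimp::Sync::S3Bucket.new(remote: "assets.example.com", local: "./public")
bucket.run(:pull)                # only writes files whose content differs locally
bucket.run(:pull, reset: true)   # wipes ./public, then downloads everything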

#run_push_action(options = {}) ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 75

def run_push_action(options={})
  require 'rack' unless defined?(::Rack)
  entries = Dir[local_path.join('**/*')].map(&:to_pathname)
  prepare_manifest_for(entries)

  entries.reject! { |entry| entry.to_s.match(/\.DS_Store/) }
  entries.reject!(&:directory?)

  uploaded = deploy_manifest['uploaded'] = []

  entries.each do |entry|
    destination = entry.relative_path_from(local_path).to_s.without_leading_slash
    fingerprint = Digest::MD5.hexdigest(entry.read)

    if asset_fingerprints[destination] == fingerprint
      #log "Skipping #{ destination }: found in manifest"
      next
    end

    content_type = Rack::Mime.mime_type(File.extname(destination.split("/").last))

    if existing = s3.files.get(destination)
      if existing.etag == fingerprint
        log "Skipping #{ destination }: similar etag"
      else
        existing.body = entry.read
        existing.acl = options[:acl] || 'public-read'
        existing.content_type = content_type
        log "Updated #{ destination }; content-type: #{ content_type }"
        uploaded << destination
        existing.save
      end
    else
      log "Uploaded #{ destination }; content-type: #{ content_type }"
      s3.files.create(key: destination, body: entry.read, acl: 'public-read', content_type: content_type)
      uploaded << destination
    end

    asset_fingerprints[destination] = fingerprint
  end

  return if uploaded.empty?

  log "Saving deploy manifest. #{ deploy_manifest.keys.length } entries"
  deploy_manifest_path.open("w+") {|fh| fh.write(deploy_manifest.to_json) }
end
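
Pushing is incremental as well: a file is skipped when its MD5 matches either the stored fingerprint or the remote etag, and the deploy manifest is rewritten only when something was actually uploaded. A sketch with illustrative values:

bucket = Datapimp::Sync::S3Bucket.new(remote: "assets.example.com", local: "./public")
bucket.run(:push, acl: "public-read")
bucket.deploy_manifest["uploaded"]   # => keys uploaded or updated during this push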

#run_reset_action(options = {}) ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 124

def run_reset_action(options={})
  bucket = storage.directories.get(remote)
  bucket.files.each {|f| key = f.key; f.delete rescue nil; f.destroy rescue nil; log "Deleting #{ key }"}
end

#run_update_acl_action(options = {}) ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 63

def run_update_acl_action(options={})
  s3.files.each do |file|
    file.acl = 'public-read'
    file.save
    log "Updated acl for #{ file.key } to public-read"
  end
end

#s3 ⇒ Object

Returns the S3 bucket via Fog.

# File 'lib/datapimp/sync/s3_bucket.rb', line 7

def s3
  @s3 ||= storage.directories.get(remote).tap do |bucket|
    if setup_website == true
      bucket.public = true
      bucket.save
      storage.put_bucket_website(remote, 'index.html', key: 'error.html')
    end

    if redirect == true
      log "Should be creating a redirect bucket"
    end
  end
end
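
The returned object is a Fog storage directory, so the usual Fog collection API applies. A sketch (bucket name illustrative; setup_website and redirect are read as Mash attributes of the instance):

bucket = Datapimp::Sync::S3Bucket.new(remote: "assets.example.com", setup_website: true)
bucket.s3.key                                   # => "assets.example.com"
bucket.s3.files.each { |file| puts file.key }   # list the bucket's objects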

#storage ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 25

def storage
  Datapimp::Sync.amazon.storage
end

#website_hostname ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 29

def website_hostname
  "#{s3.key}.s3-website-#{ s3.location }.amazonaws.com"
end
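
Together with #website_url below, this exposes the bucket's S3 static-website endpoint. For example, assuming the bucket name and region shown here (both illustrative values):

bucket = Datapimp::Sync::S3Bucket.new(remote: "assets.example.com")
bucket.website_hostname       # => "assets.example.com.s3-website-us-west-2.amazonaws.com"
bucket.website_url("https")   # => "https://assets.example.com.s3-website-us-west-2.amazonaws.com"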

#website_url(proto = "http") ⇒ Object

# File 'lib/datapimp/sync/s3_bucket.rb', line 33

def website_url(proto="http")
  "#{proto}://#{ website_hostname }"
end